diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml index d33de296906..2146bb70fd8 100644 --- a/hapi-deployable-pom/pom.xml +++ b/hapi-deployable-pom/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml index 1e0949da036..6c1b44706b1 100644 --- a/hapi-fhir-android/pom.xml +++ b/hapi-fhir-android/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml index 84536f66a91..9a173eed3eb 100644 --- a/hapi-fhir-base/pom.xml +++ b/hapi-fhir-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java index 9514e059740..7b92d0832cf 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TaskChunker.java @@ -40,13 +40,22 @@ import java.util.stream.Stream; */ public class TaskChunker { - public void chunk(Collection theInput, int theChunkSize, Consumer> theBatchConsumer) { + public static void chunk(List theInput, int theChunkSize, Consumer> theBatchConsumer) { + if (theInput.size() <= theChunkSize) { + theBatchConsumer.accept(theInput); + return; + } + chunk((Collection) theInput, theChunkSize, theBatchConsumer); + } + + public static void chunk(Collection theInput, int theChunkSize, Consumer> theBatchConsumer) { List input; if (theInput instanceof List) { input = (List) theInput; } else { input = new ArrayList<>(theInput); } + for (int i = 0; i < input.size(); i += theChunkSize) { int to = i + theChunkSize; to = Math.min(to, input.size()); @@ -56,12 +65,11 @@ public class TaskChunker { } @Nonnull - public Stream> chunk(Stream theStream, int theChunkSize) { + public static Stream> chunk(Stream theStream, int theChunkSize) { return StreamUtil.partition(theStream, theChunkSize); } - @Nonnull - public void chunk(Iterator theIterator, int theChunkSize, Consumer> theListConsumer) { + public static void chunk(Iterator theIterator, int theChunkSize, Consumer> theListConsumer) { chunk(Streams.stream(theIterator), theChunkSize).forEach(theListConsumer); } } diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties index f338fc8fa1c..def641589f0 100644 --- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties +++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties @@ -87,6 +87,7 @@ ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFail ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index. ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. 
It can also happen when a request disables the Upsert Existence Check. ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}" +ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.cantUndeleteWithDeletesDisabled=Unable to restore previously deleted resource as deletes are disabled on this server. ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}" ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search @@ -200,6 +201,7 @@ ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder.invalidTar ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl.invalidResourceType=Invalid/unsupported resource type: "{0}" ca.uhn.fhir.jpa.dao.index.IdHelperService.nonUniqueForcedId=Non-unique ID specified, can not process request +ca.uhn.fhir.jpa.dao.index.IdHelperService.deletedId=Resource {0} has been deleted ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.noIdSupplied=No Partition ID supplied ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.missingPartitionIdOrName=Partition must have an ID and a Name diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java index 38df69f017f..50f71fa79f0 100644 --- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java +++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java @@ -37,7 +37,7 @@ public class TaskChunkerTest { List input = newIntRangeList(0, 35); // Execute - new TaskChunker().chunk(input, 10, myConsumer); + TaskChunker.chunk(input, 10, myConsumer); // Verify verify(myConsumer, times(4)).accept(myConsumerCaptor.capture()); diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml index d8d738caf99..db25bc4f2ba 100644 --- a/hapi-fhir-bom/pom.xml +++ b/hapi-fhir-bom/pom.xml @@ -4,7 +4,7 @@ 4.0.0 ca.uhn.hapi.fhir hapi-fhir-bom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT pom HAPI FHIR BOM @@ -12,7 +12,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml index b4d72e5020f..6edc883df32 100644 --- a/hapi-fhir-checkstyle/pom.xml +++ b/hapi-fhir-checkstyle/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index 256dece5d36..d4a3f519428 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml index a0e66e2725c..e402e75a1e3 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-fhir-cli - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml index e32deda674c..1c93cfa5375 100644 --- a/hapi-fhir-cli/pom.xml +++ b/hapi-fhir-cli/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 
7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml index d299e34c731..eb1663c660b 100644 --- a/hapi-fhir-client-okhttp/pom.xml +++ b/hapi-fhir-client-okhttp/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml index 407addfc2ee..c743cad6eb5 100644 --- a/hapi-fhir-client/pom.xml +++ b/hapi-fhir-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml index 63f9fe95ade..16105895f4c 100644 --- a/hapi-fhir-converter/pom.xml +++ b/hapi-fhir-converter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml index 31519bd4300..bc99a2f88c7 100644 --- a/hapi-fhir-dist/pom.xml +++ b/hapi-fhir-dist/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml index f8b791bcc5c..27a94cebe28 100644 --- a/hapi-fhir-docs/pom.xml +++ b/hapi-fhir-docs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-dont-store-reslink-target-partition-date.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-dont-store-reslink-target-partition-date.yaml new file mode 100644 index 00000000000..db25abdd722 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-dont-store-reslink-target-partition-date.yaml @@ -0,0 +1,5 @@ +--- +type: change +issue: 6460 +title: "The HFJ_RES_LINK table will no longer store the `PARTITION_DATE` value for the indexed link target + resource, as this was an unused feature which has been removed as a part of a larger performance optimization." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-external-refs.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-external-refs.yaml new file mode 100644 index 00000000000..f67eb29bbf5 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-external-refs.yaml @@ -0,0 +1,9 @@ +--- +type: perf +issue: 6460 +title: "The JPA server FHIR transaction processor will now pre-fetch the target + resource state for references to resources that don't also appear in the + transaction bundle. This means that if you process a large FHIR transaction containing + many references to other resources in the repository that are not also being + updated in the same transaction, you should see a very significant improvement + in performance." 
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-ids.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-ids.yaml new file mode 100644 index 00000000000..11808b898fe --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-improve-transaction-performance-for-ids.yaml @@ -0,0 +1,7 @@ +--- +type: perf +issue: 6460 +title: "The JPA server FHIR transaction processor will now more aggressively cache + resource IDs for previously seen resources, reducing the number of database reads + required when processing transactions. This should provide a noticeable improvement + in performance when processing transactions which update pre-existing resources." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-prevent-undeleting-if-deletes-disabled.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-prevent-undeleting-if-deletes-disabled.yaml new file mode 100644 index 00000000000..9f9d20cd15f --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-prevent-undeleting-if-deletes-disabled.yaml @@ -0,0 +1,5 @@ +--- +type: change +issue: 6460 +title: "If deletes are disabled in the JPA server, it is no longer possible to un-delete + a resource (i.e. update a previously deleted resource to make it non-deleted)." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-version-delete-and-update-in-same-transaction-separately.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-version-delete-and-update-in-same-transaction-separately.yaml new file mode 100644 index 00000000000..9fdb5449ff8 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6460-version-delete-and-update-in-same-transaction-separately.yaml @@ -0,0 +1,9 @@ +--- +type: change +issue: 6460 +title: "When performing a FHIR Transaction which deletes and then updates (or otherwise + un-deletes) the same resource within a single transaction, the delete was previously + not stored as a distinct version (meaning that the resource version was only + incremented once, and no delete was actually stored in the resource history). This + has been changed so that deletes will always appear as a distinct entry in the + resource history." 
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml index 676ceab42aa..1e106aaa69f 100644 --- a/hapi-fhir-jacoco/pom.xml +++ b/hapi-fhir-jacoco/pom.xml @@ -11,7 +11,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml index 1b392dd584a..fe17d290684 100644 --- a/hapi-fhir-jaxrsserver-base/pom.xml +++ b/hapi-fhir-jaxrsserver-base/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml index 6a91cfd67cf..3d91a9e4086 100644 --- a/hapi-fhir-jpa/pom.xml +++ b/hapi-fhir-jpa/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java index fd38355ecd0..99024e56ec6 100644 --- a/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java +++ b/hapi-fhir-jpa/src/main/java/ca/uhn/fhir/jpa/config/HapiFhirLocalContainerEntityManagerFactoryBean.java @@ -20,7 +20,11 @@ package ca.uhn.fhir.jpa.config; import com.google.common.base.Strings; -import org.hibernate.cfg.AvailableSettings; +import jakarta.annotation.Nonnull; +import org.hibernate.cfg.BatchSettings; +import org.hibernate.cfg.JdbcSettings; +import org.hibernate.cfg.ManagedBeanSettings; +import org.hibernate.cfg.QuerySettings; import org.hibernate.query.criteria.ValueHandlingMode; import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; @@ -46,18 +50,19 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain myConfigurableListableBeanFactory = theConfigurableListableBeanFactory; } + @Nonnull @Override public Map getJpaPropertyMap() { Map retVal = super.getJpaPropertyMap(); // SOMEDAY these defaults can be set in the constructor. setJpaProperties does a merge. 
- if (!retVal.containsKey(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE)) { - retVal.put(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND); + if (!retVal.containsKey(QuerySettings.CRITERIA_VALUE_HANDLING_MODE)) { + retVal.put(QuerySettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND); } - if (!retVal.containsKey(AvailableSettings.CONNECTION_HANDLING)) { + if (!retVal.containsKey(JdbcSettings.CONNECTION_HANDLING)) { retVal.put( - AvailableSettings.CONNECTION_HANDLING, + JdbcSettings.CONNECTION_HANDLING, PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_TRANSACTION); } @@ -65,26 +70,25 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain * Set some performance options */ - if (!retVal.containsKey(AvailableSettings.STATEMENT_BATCH_SIZE)) { - retVal.put(AvailableSettings.STATEMENT_BATCH_SIZE, "30"); + if (!retVal.containsKey(BatchSettings.STATEMENT_BATCH_SIZE)) { + retVal.put(BatchSettings.STATEMENT_BATCH_SIZE, "30"); } - if (!retVal.containsKey(AvailableSettings.ORDER_INSERTS)) { - retVal.put(AvailableSettings.ORDER_INSERTS, "true"); + if (!retVal.containsKey(BatchSettings.ORDER_INSERTS)) { + retVal.put(BatchSettings.ORDER_INSERTS, "true"); } - if (!retVal.containsKey(AvailableSettings.ORDER_UPDATES)) { - retVal.put(AvailableSettings.ORDER_UPDATES, "true"); + if (!retVal.containsKey(BatchSettings.ORDER_UPDATES)) { + retVal.put(BatchSettings.ORDER_UPDATES, "true"); } - if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) { - retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true"); + if (!retVal.containsKey(BatchSettings.BATCH_VERSIONED_DATA)) { + retVal.put(BatchSettings.BATCH_VERSIONED_DATA, "true"); } // Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate - // needs - // in order to be able to resolve beans, so we add it back in manually here - if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) { - retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory)); + // needs in order to be able to resolve beans, so we add it back in manually here + if (!retVal.containsKey(ManagedBeanSettings.BEAN_CONTAINER)) { + retVal.put(ManagedBeanSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory)); } return retVal; diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 5a4d190cc43..b7db9a6a5a5 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java index fca9611aed2..17a6833bd12 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java @@ -42,7 +42,6 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; import ca.uhn.fhir.mdm.dao.IMdmLinkDao; import 
ca.uhn.fhir.mdm.model.MdmPidTuple; @@ -59,6 +58,7 @@ import ca.uhn.fhir.util.ExtensionUtil; import ca.uhn.fhir.util.HapiExtensions; import ca.uhn.fhir.util.Logs; import ca.uhn.fhir.util.SearchParameterUtil; +import ca.uhn.fhir.util.TaskChunker; import jakarta.annotation.Nonnull; import jakarta.persistence.EntityManager; import org.apache.commons.lang3.StringUtils; @@ -315,8 +315,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor { // for each patient pid -> // search for the target resources, with their correct patient references, chunked. // The results will be jammed into myReadPids - QueryChunker queryChunker = new QueryChunker<>(); - queryChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> { + TaskChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> { try { queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, theDef); } catch (IOException ex) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index 4bc72010bc3..f86ad9310be 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -1323,7 +1323,7 @@ public abstract class BaseHapiFhirDao extends BaseStora * the previous version entity. */ if (historyEntry == null) { - historyEntry = theEntity.toHistory(versionedTags); + historyEntry = theEntity.toHistory(versionedTags && theEntity.getDeleted() == null); } historyEntry.setEncoding(theChanged.getEncoding()); @@ -1331,7 +1331,7 @@ public abstract class BaseHapiFhirDao extends BaseStora historyEntry.setResourceTextVc(theChanged.getResourceText()); ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId()); - myResourceHistoryTableDao.save(historyEntry); + myEntityManager.persist(historyEntry); theEntity.setCurrentVersionEntity(historyEntry); // Save resource source @@ -1489,6 +1489,11 @@ public abstract class BaseHapiFhirDao extends BaseStora wasDeleted = theOldResource.isDeleted(); } + if (wasDeleted && !myStorageSettings.isDeleteEnabled()) { + String msg = myContext.getLocalizer().getMessage(BaseHapiFhirDao.class, "cantUndeleteWithDeletesDisabled"); + throw new InvalidRequestException(Msg.code(2573) + msg); + } + DaoMethodOutcome outcome = toMethodOutcome( theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType) .setCreated(wasDeleted); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java index f1d52bbd30b..c7fc08cae7c 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java @@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.delete.DeleteConflictUtil; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; @@ -567,8 +568,11 @@ public abstract class BaseHapiFhirResourceDao extends B // Pre-cache the resource ID 
jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext)); - myIdHelperService.addResolvedPidToForcedId( - jpaPid, theRequestPartitionId, getResourceName(), entity.getFhirId(), null); + String fhirId = entity.getFhirId(); + if (fhirId == null) { + fhirId = Long.toString(entity.getId()); + } + myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null); theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid); theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource); @@ -1736,8 +1740,13 @@ public abstract class BaseHapiFhirResourceDao extends B validateResourceTypeAndThrowInvalidRequestException(theId); BaseHasResource entity; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds( - requestPartitionId, getResourceName(), theId.getIdPart()); + JpaPid pid = myIdHelperService + .resolveResourceIdentity( + requestPartitionId, + getResourceName(), + theId.getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()) + .getPersistentId(); Set readPartitions = null; if (requestPartitionId.isAllPartitions()) { entity = myEntityManager.find(ResourceTable.class, pid.getId()); @@ -1779,10 +1788,6 @@ public abstract class BaseHapiFhirResourceDao extends B } } - if (entity == null) { - throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known"); - } - if (theId.hasVersionIdPart()) { if (!theId.isVersionIdPartValidLong()) { throw new ResourceNotFoundException(Msg.code(978) @@ -1822,7 +1827,10 @@ public abstract class BaseHapiFhirResourceDao extends B } } - Validate.notNull(entity); + if (entity == null) { + throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known"); + } + validateResourceType(entity); if (theCheckForForcedId) { @@ -1871,8 +1879,27 @@ public abstract class BaseHapiFhirResourceDao extends B } if (persistentId == null) { - persistentId = myIdHelperService.resolveResourcePersistentIds( - theRequestPartitionId, getResourceName(), theId.getIdPart()); + String resourceName = getResourceName(); + if (myStorageSettings.getResourceClientIdStrategy() + == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) { + if (theId.isIdPartValidLong()) { + /* + * If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number + * corresponds to a DB PID. In this case we want to resolve it regardless of + * which type the client has supplied. This is because DB PIDs are unique across + * all resource types (unlike FHIR_IDs which are namespaced to the resource type). + * We want to load the resource with that PID regardless of type because if + * the user is trying to update it we want to fail if the type is wrong, as + * opposed to trying to create a new instance. 
+ */ + resourceName = null; + } + } + persistentId = myIdHelperService.resolveResourceIdentityPid( + theRequestPartitionId, + resourceName, + theId.getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); } ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java index 0aedafefed7..fde57c39836 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirSystemDao.java @@ -192,7 +192,7 @@ public abstract class BaseHapiFhirSystemDao extends B HapiTransactionService.requireTransaction(); List pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList()); - new QueryChunker().chunk(pids, idChunk -> { + QueryChunker.chunk(pids, idChunk -> { /* * Pre-fetch the resources we're touching in this transaction in mass - this reduced the diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java index 025f7e198b1..8255daf5486 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/HistoryBuilder.java @@ -81,7 +81,7 @@ public class HistoryBuilder { private FhirContext myCtx; @Autowired - private IIdHelperService myIdHelperService; + private IIdHelperService myIdHelperService; /** * Constructor @@ -150,13 +150,13 @@ public class HistoryBuilder { query.setMaxResults(theToIndex - theFromIndex); List tables = query.getResultList(); - if (tables.size() > 0) { + if (!tables.isEmpty()) { ImmutableListMultimap resourceIdToHistoryEntries = Multimaps.index(tables, ResourceHistoryTable::getResourceId); Set pids = resourceIdToHistoryEntries.keySet().stream() .map(JpaPid::fromId) .collect(Collectors.toSet()); - PersistentIdToForcedIdMap pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids); + PersistentIdToForcedIdMap pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids); ourLog.trace("Translated IDs: {}", pidToForcedId.getResourcePersistentIdOptionalMap()); for (Long nextResourceId : resourceIdToHistoryEntries.keySet()) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java index 270238da3fb..1acd2014769 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java @@ -242,13 +242,15 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry); - if (!retVal.isUnchangedInCurrentOperation()) { + if (thePerformIndexing) { + if (!retVal.isUnchangedInCurrentOperation()) { - org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource); - addPidToResource(theEntity, cs); + org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource); + addPidToResource(theEntity, cs); - myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded( - cs, (ResourceTable) theEntity, theRequest); + 
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded( + cs, (ResourceTable) theEntity, theRequest); + } } return retVal; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java index 69088bc9699..eed06fe67da 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoObservation.java @@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.dao; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -39,12 +41,15 @@ import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContextType; import jakarta.servlet.http.HttpServletResponse; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Observation; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.support.TransactionTemplate; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.TreeMap; public class JpaResourceDaoObservation extends BaseHapiFhirResourceDao @@ -138,13 +143,14 @@ public class JpaResourceDaoObservation extends BaseHapi patientParams.addAll(theSearchParameterMap.get(getSubjectParamName())); } + Map ids = new HashMap<>(); for (List nextPatientList : patientParams) { for (IQueryParameterType nextOr : nextPatientList) { if (nextOr instanceof ReferenceParam) { ReferenceParam ref = (ReferenceParam) nextOr; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds( - requestPartitionId, ref.getResourceType(), ref.getIdPart()); - orderedSubjectReferenceMap.put(pid.getId(), nextOr); + IIdType id = myFhirContext.getVersion().newIdType(); + id.setParts(null, ref.getResourceType(), ref.getIdPart(), null); + ids.put(id, ref); } else { throw new IllegalArgumentException( Msg.code(942) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass()); @@ -152,6 +158,15 @@ public class JpaResourceDaoObservation extends BaseHapi } } + Map> resolvedIds = myIdHelperService.resolveResourceIdentities( + requestPartitionId, + ids.keySet(), + ResolveIdentityMode.includeDeleted().cacheOk()); + for (Map.Entry entry : ids.entrySet()) { + IResourceLookup lookup = resolvedIds.get(entry.getKey()); + orderedSubjectReferenceMap.put(lookup.getPersistentId().getId(), entry.getValue()); + } + theSearchParameterMap.remove(getSubjectParamName()); theSearchParameterMap.remove(getPatientParamName()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java index 442db1710ad..f70d17849d1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoValueSet.java @@ -303,12 +303,14 @@ public class JpaResourceDaoValueSet extends BaseHapiFhi theForceUpdate, theCreateNewHistoryEntry); - if 
(getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) { - if (retVal.getDeleted() == null) { - ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource); - myTerminologySvc.storeTermValueSet(retVal, valueSet); - } else { - myTerminologySvc.deleteValueSetAndChildren(retVal); + if (thePerformIndexing) { + if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) { + if (retVal.getDeleted() == null) { + ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource); + myTerminologySvc.storeTermValueSet(retVal, valueSet); + } else { + myTerminologySvc.deleteValueSetAndChildren(retVal); + } } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java index 86d9f727906..6e0280143bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessor.java @@ -26,25 +26,29 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken; import ca.uhn.fhir.jpa.model.entity.StorageSettings; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; -import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.param.TokenParam; +import ca.uhn.fhir.util.FhirTerser; import ca.uhn.fhir.util.ResourceReferenceInfo; import ca.uhn.fhir.util.StopWatch; +import ca.uhn.fhir.util.TaskChunker; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; import jakarta.annotation.Nullable; import jakarta.persistence.EntityManager; +import jakarta.persistence.FlushModeType; import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContextType; import jakarta.persistence.PersistenceException; @@ -67,6 +71,7 @@ import org.springframework.context.ApplicationContext; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; @@ -83,6 +88,7 @@ public class TransactionProcessor extends BaseTransactionProcessor { public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$"); private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class); + public static final int CONDITIONAL_URL_FETCH_CHUNK_SIZE = 100; @Autowired private ApplicationContext myApplicationContext; @@ -146,25 +152,51 @@ public class TransactionProcessor extends BaseTransactionProcessor { List theEntries, StopWatch theTransactionStopWatch) { - ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter(); - RequestPartitionId 
requestPartitionId = - super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries); + /* + * We temporarily set the flush mode for the duration of the DB transaction + * from the default of AUTO to the temporary value of COMMIT here. We do this + * because in AUTO mode, if any SQL SELECTs are required during the + * processing of an individual transaction entry, the server will flush the + * pending INSERTs/UPDATEs to the database before executing the SELECT. + * This hurts performance since we don't get the benefit of batching those + * write operations as much as possible. The tradeoff here is that we + * could theoretically have transaction operations which try to read + * data previously written in the same transaction, and they won't see it. + * This shouldn't actually be an issue anyhow - we pre-fetch conditional + * URLs and reference targets at the start of the transaction. But this + * tradeoff still feels worth it, since the most common use of transactions + * is for fast writing of data. + * + * Note that it's probably not necessary to reset it back, it should + * automatically go back to the default value after the transaction but + * we reset it just to be safe. + */ + FlushModeType initialFlushMode = myEntityManager.getFlushMode(); + try { + myEntityManager.setFlushMode(FlushModeType.COMMIT); - if (requestPartitionId != null) { - preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId); + ITransactionProcessorVersionAdapter versionAdapter = getVersionAdapter(); + RequestPartitionId requestPartitionId = + super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries); + + if (requestPartitionId != null) { + preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId); + } + + return super.doTransactionWriteOperations( + theRequest, + theActionName, + theTransactionDetails, + theAllIds, + theIdSubstitutions, + theIdToPersistedOutcome, + theResponse, + theOriginalRequestOrder, + theEntries, + theTransactionStopWatch); + } finally { + myEntityManager.setFlushMode(initialFlushMode); } - - return super.doTransactionWriteOperations( - theRequest, - theActionName, - theTransactionDetails, - theAllIds, - theIdSubstitutions, - theIdToPersistedOutcome, - theResponse, - theOriginalRequestOrder, - theEntries, - theTransactionStopWatch); } private void preFetch( @@ -199,40 +231,100 @@ public class TransactionProcessor extends BaseTransactionProcessor { RequestPartitionId theRequestPartitionId, Set foundIds, List idsToPreFetch) { - List idsToPreResolve = new ArrayList<>(); + + FhirTerser terser = myFhirContext.newTerser(); + + // Key: The ID of the resource + // Value: TRUE if we should prefetch the existing resource details and all stored indexes, + // FALSE if we should prefetch only the identity (resource ID and deleted status) + Map idsToPreResolve = new HashMap<>(theEntries.size() * 3); + for (IBase nextEntry : theEntries) { IBaseResource resource = theVersionAdapter.getResource(nextEntry); if (resource != null) { String verb = theVersionAdapter.getEntryRequestVerb(myFhirContext, nextEntry); + + /* + * Pre-fetch any resources that are potentially being directly updated by ID + */ if ("PUT".equals(verb) || "PATCH".equals(verb)) { String requestUrl = theVersionAdapter.getEntryRequestUrl(nextEntry); - if (countMatches(requestUrl, '/') == 1 && countMatches(requestUrl, '?') == 0) { + if (countMatches(requestUrl, '?') == 0) { IIdType id = myFhirContext.getVersion().newIdType(); id.setValue(requestUrl); - 
idsToPreResolve.add(id); + IIdType unqualifiedVersionless = id.toUnqualifiedVersionless(); + idsToPreResolve.put(unqualifiedVersionless, Boolean.TRUE); + } + } + + /* + * Pre-fetch any resources that are referred to directly by ID (don't replace + * the TRUE flag with FALSE in case we're updating a resource but also + * pointing to that resource elsewhere in the bundle) + */ + if ("PUT".equals(verb) || "POST".equals(verb)) { + for (ResourceReferenceInfo referenceInfo : terser.getAllResourceReferences(resource)) { + IIdType reference = referenceInfo.getResourceReference().getReferenceElement(); + if (reference != null + && !reference.isLocal() + && !reference.isUuid() + && reference.hasResourceType() + && reference.hasIdPart() + && !reference.getValue().contains("?")) { + idsToPreResolve.putIfAbsent(reference.toUnqualifiedVersionless(), Boolean.FALSE); + } } } } } - List outcome = - myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve).stream() - .collect(Collectors.toList()); - for (JpaPid next : outcome) { - foundIds.add( - next.getAssociatedResourceId().toUnqualifiedVersionless().getValue()); - theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next); - if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY - || !next.getAssociatedResourceId().isIdPartValidLong()) { - idsToPreFetch.add(next.getId()); + + /* + * If all the entries in the pre-fetch ID map have a value of TRUE, this + * means we only have IDs associated with resources we're going to directly + * update/patch within the transaction. In that case, it's fine to include + * deleted resources, since updating them will bring them back to life. + * + * If we have any FALSE entries, we're also pre-fetching reference targets + * which means we don't want deleted resources, because those are not OK + * to reference. + */ + boolean preFetchIncludesReferences = idsToPreResolve.values().stream().anyMatch(t -> !t); + ResolveIdentityMode resolveMode = preFetchIncludesReferences + ? 
ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled() + : ResolveIdentityMode.includeDeleted().cacheOk(); + + Map> outcomes = myIdHelperService.resolveResourceIdentities( + theRequestPartitionId, idsToPreResolve.keySet(), resolveMode); + for (Map.Entry> entry : outcomes.entrySet()) { + JpaPid next = (JpaPid) entry.getValue().getPersistentId(); + IIdType unqualifiedVersionlessId = entry.getKey(); + foundIds.add(unqualifiedVersionlessId.getValue()); + theTransactionDetails.addResolvedResourceId(unqualifiedVersionlessId, next); + if (idsToPreResolve.get(unqualifiedVersionlessId) == Boolean.TRUE) { + if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY + || (next.getAssociatedResourceId() != null + && !next.getAssociatedResourceId().isIdPartValidLong())) { + idsToPreFetch.add(next.getId()); + } } } - for (IIdType next : idsToPreResolve) { - if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) { + + // Any IDs that could not be resolved are presumably not there, so + // cache that fact so we don't look again later + for (IIdType next : idsToPreResolve.keySet()) { + if (!foundIds.contains(next.getValue())) { theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null); } } } + @Override + protected void handleVerbChangeInTransactionWriteOperations() { + super.handleVerbChangeInTransactionWriteOperations(); + + myEntityManager.flush(); + } + private void preFetchConditionalUrls( TransactionDetails theTransactionDetails, List theEntries, @@ -274,12 +366,10 @@ public class TransactionProcessor extends BaseTransactionProcessor { } } - new QueryChunker() - .chunk( - searchParameterMapsToResolve, - 100, - map -> preFetchSearchParameterMaps( - theTransactionDetails, theRequestPartitionId, map, idsToPreFetch)); + TaskChunker.chunk( + searchParameterMapsToResolve, + CONDITIONAL_URL_FETCH_CHUNK_SIZE, + map -> preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch)); } /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java index 9e81e87f598..c4542ea8aac 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/index/IdHelperService.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; @@ -35,11 +36,12 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.jpa.util.QueryChunker; -import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; +import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; +import ca.uhn.fhir.util.TaskChunker; import 
com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ListMultimap; import com.google.common.collect.MultimapBuilder; @@ -60,11 +62,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.IdType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.support.TransactionSynchronizationManager; -import java.time.LocalDate; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -79,6 +82,7 @@ import java.util.Set; import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder.replaceDefaultPartitionIdIfNonNull; +import static ca.uhn.fhir.model.primitive.IdDt.isValidLong; import static org.apache.commons.lang3.StringUtils.isNotBlank; /** @@ -102,6 +106,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank; public class IdHelperService implements IIdHelperService { public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0]; public static final String RESOURCE_PID = "RESOURCE_PID"; + private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class); @Autowired protected IResourceTableDao myResourceTableDao; @@ -128,20 +133,6 @@ public class IdHelperService implements IIdHelperService { myDontCheckActiveTransactionForUnitTest = theDontCheckActiveTransactionForUnitTest; } - /** - * Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to - * convert those to the underlying Long values that are stored, for lookup and comparison purposes. - * - * @throws ResourceNotFoundException If the ID can not be found - */ - @Override - @Nonnull - public IResourceLookup resolveResourceIdentity( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) - throws ResourceNotFoundException { - return resolveResourceIdentity(theRequestPartitionId, theResourceType, theResourceId, false); - } - /** * Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to * convert those to the underlying Long values that are stored, for lookup and comparison purposes. 
@@ -153,48 +144,236 @@ public class IdHelperService implements IIdHelperService { @Nonnull public IResourceLookup resolveResourceIdentity( @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - final String theResourceId, - boolean theExcludeDeleted) + @Nullable String theResourceType, + @Nonnull final String theResourceId, + @Nonnull ResolveIdentityMode theMode) throws ResourceNotFoundException { - assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive() - : "no transaction active"; - String resourceIdToUse = theResourceId; - if (resourceIdToUse.contains("/")) { - resourceIdToUse = theResourceId.substring(resourceIdToUse.indexOf("/") + 1); + IIdType id; + if (theResourceType != null) { + id = newIdType(theResourceType + "/" + theResourceId); + } else { + id = newIdType(theResourceId); } - IdDt id = new IdDt(theResourceType, resourceIdToUse); - Map>> matches = - translateForcedIdToPids(theRequestPartitionId, Collections.singletonList(id), theExcludeDeleted); + List ids = List.of(id); + Map> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode); // We only pass 1 input in so only 0..1 will come back - if (matches.isEmpty() || !matches.containsKey(resourceIdToUse)) { + if (!outcome.containsKey(id)) { throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known"); } - if (matches.size() > 1 || matches.get(resourceIdToUse).size() > 1) { - /* - * This means that: - * 1. There are two resources with the exact same resource type and forced id - * 2. The unique constraint on this column-pair has been dropped - */ - String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId"); - throw new PreconditionFailedException(Msg.code(1099) + msg); + return outcome.get(id); + } + + @Nonnull + @Override + public Map> resolveResourceIdentities( + @Nonnull RequestPartitionId theRequestPartitionId, + Collection theIds, + ResolveIdentityMode theMode) { + assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive() + : "no transaction active"; + + if (theIds.isEmpty()) { + return new HashMap<>(); } - return matches.get(resourceIdToUse).get(0); + Collection ids = new ArrayList<>(theIds); + ids.forEach(id -> Validate.isTrue(id.hasIdPart())); + + RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId); + ListMultimap> idToLookup = + MultimapBuilder.hashKeys(theIds.size()).arrayListValues(1).build(); + + // Do we have any FHIR ID lookups cached for any of the IDs + if (theMode.isUseCache(myStorageSettings.isDeleteEnabled()) && !ids.isEmpty()) { + resolveResourceIdentitiesForFhirIdsUsingCache(requestPartitionId, theMode, ids, idToLookup); + } + + // We still haven't found IDs, let's look them up in the DB + if (!ids.isEmpty()) { + resolveResourceIdentitiesForFhirIdsUsingDatabase(requestPartitionId, ids, idToLookup); + } + + // Convert the multimap into a simple map + Map> retVal = new HashMap<>(); + for (Map.Entry> next : idToLookup.entries()) { + if (next.getValue().getDeleted() != null) { + if (theMode.isFailOnDeleted()) { + String msg = myFhirCtx + .getLocalizer() + .getMessageSanitized( + IdHelperService.class, + "deletedId", + next.getKey().getValue()); + throw new ResourceGoneException(Msg.code(2572) + msg); + } + if (!theMode.isIncludeDeleted()) { + continue; + } + } + + IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue()); + if (previousValue != null) { + /* + * This 
means that either: + * 1. There are two resources with the exact same resource type and forced + * id. The most likely reason for that is that someone is performing a + * multi-partition search and there are resources on each partition + * with the same ID. + * 2. The unique constraint on the FHIR_ID column has been dropped + */ + ourLog.warn( + "Resource ID[{}] corresponds to lookups: {} and {}", + next.getKey(), + previousValue, + next.getValue()); + String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId"); + throw new PreconditionFailedException(Msg.code(1099) + msg); + } + } + + return retVal; } /** - * Returns a mapping of Id -> IResourcePersistentId. - * If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned) + * Fetch the resource identity ({@link IResourceLookup}) for a collection of + * resource IDs from the internal memory cache if possible. Note that we only + * use cached results if deletes are disabled on the server (since it is + * therefore not possible that we have an entry in the cache that has since + * been deleted but the cache doesn't know about the deletion), or if we + * aren't excluding deleted results anyhow. + * + * @param theRequestPartitionId The partition(s) to search + * @param theIdsToResolve The IDs we should look up. Any IDs that are resolved + * will be removed from this list. Any IDs remaining in + * the list after calling this method still haven't + * been attempted to be resolved. + * @param theMapToPopulate The results will be populated into this map */ - @Override - @Nonnull - public Map resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds) { - return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theIds, false); + private void resolveResourceIdentitiesForFhirIdsUsingCache( + @Nonnull RequestPartitionId theRequestPartitionId, + ResolveIdentityMode theMode, + Collection theIdsToResolve, + ListMultimap> theMapToPopulate) { + for (Iterator idIterator = theIdsToResolve.iterator(); idIterator.hasNext(); ) { + IIdType nextForcedId = idIterator.next(); + MemoryCacheService.ForcedIdCacheKey nextKey = new MemoryCacheService.ForcedIdCacheKey( + nextForcedId.getResourceType(), nextForcedId.getIdPart(), theRequestPartitionId); + if (theMode.isUseCache(myStorageSettings.isDeleteEnabled())) { + List> cachedLookups = myMemoryCacheService.getIfPresent( + MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey); + if (cachedLookups != null && !cachedLookups.isEmpty()) { + idIterator.remove(); + for (IResourceLookup cachedLookup : cachedLookups) { + if (theMode.isIncludeDeleted() || cachedLookup.getDeleted() == null) { + theMapToPopulate.put(nextKey.toIdType(myFhirCtx), cachedLookup); + } + } + } + } + } + } + + /** + * Fetch the resource identity ({@link IResourceLookup}) for a collection of + * resource IDs from the database + * + * @param theRequestPartitionId The partition(s) to search + * @param theIdsToResolve The IDs we should look up + * @param theMapToPopulate The results will be populated into this map + */ + private void resolveResourceIdentitiesForFhirIdsUsingDatabase( + RequestPartitionId theRequestPartitionId, + Collection theIdsToResolve, + ListMultimap> theMapToPopulate) { + + /* + * If we have more than a threshold of IDs, we need to chunk the execution to + * avoid having too many parameters in one SQL statement + */ + int maxPageSize = (SearchBuilder.getMaximumPageSize() / 
2) - 10; + if (theIdsToResolve.size() > maxPageSize) { + TaskChunker.chunk( + theIdsToResolve, + maxPageSize, + chunk -> resolveResourceIdentitiesForFhirIdsUsingDatabase( + theRequestPartitionId, chunk, theMapToPopulate)); + return; + } + + CriteriaBuilder cb = myEntityManager.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = cb.createTupleQuery(); + Root from = criteriaQuery.from(ResourceTable.class); + criteriaQuery.multiselect( + from.get("myId"), + from.get("myResourceType"), + from.get("myFhirId"), + from.get("myDeleted"), + from.get("myPartitionIdValue")); + + List outerAndPredicates = new ArrayList<>(2); + if (!theRequestPartitionId.isAllPartitions()) { + getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(outerAndPredicates::add); + } + + // Create one clause per id. + List innerIdPredicates = new ArrayList<>(theIdsToResolve.size()); + boolean haveUntypedIds = false; + for (IIdType next : theIdsToResolve) { + if (!next.hasResourceType()) { + haveUntypedIds = true; + } + + List idPredicates = new ArrayList<>(2); + + if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC + && next.isIdPartValidLong()) { + Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong()); + idPredicates.add(typeCriteria); + } else { + if (isNotBlank(next.getResourceType())) { + Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType()); + idPredicates.add(typeCriteria); + } + Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart()); + idPredicates.add(idCriteria); + } + + innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY))); + } + outerAndPredicates.add(cb.or(innerIdPredicates.toArray(EMPTY_PREDICATE_ARRAY))); + + criteriaQuery.where(cb.and(outerAndPredicates.toArray(EMPTY_PREDICATE_ARRAY))); + TypedQuery query = myEntityManager.createQuery(criteriaQuery); + List results = query.getResultList(); + for (Tuple nextId : results) { + // Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending. 
+ Long resourcePid = nextId.get(0, Long.class); + String resourceType = nextId.get(1, String.class); + String fhirId = nextId.get(2, String.class); + Date deletedAd = nextId.get(3, Date.class); + Integer partitionId = nextId.get(4, Integer.class); + if (resourcePid != null) { + JpaResourceLookup lookup = new JpaResourceLookup( + resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null)); + + MemoryCacheService.ForcedIdCacheKey nextKey = + new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId); + IIdType id = nextKey.toIdType(myFhirCtx); + theMapToPopulate.put(id, lookup); + + if (haveUntypedIds) { + id = nextKey.toIdTypeWithoutResourceType(myFhirCtx); + theMapToPopulate.put(id, lookup); + } + + List> valueToCache = theMapToPopulate.get(id); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, valueToCache); + } + } } /** @@ -208,7 +387,7 @@ public class IdHelperService implements IIdHelperService { @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds, - boolean theExcludeDeleted) { + ResolveIdentityMode theMode) { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); Validate.notNull(theIds, "theIds cannot be null"); Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty"); @@ -224,7 +403,7 @@ public class IdHelperService implements IIdHelperService { // is a forced id // we must resolve! if (myStorageSettings.isDeleteEnabled()) { - retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted) + retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode) .getPersistentId(); retVals.put(id, retVal); } else { @@ -249,18 +428,6 @@ public class IdHelperService implements IIdHelperService { return retVals; } - /** - * Given a resource type and ID, determines the internal persistent ID for the resource. - * - * @throws ResourceNotFoundException If the ID can not be found - */ - @Override - @Nonnull - public JpaPid resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) { - return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theId, false); - } - /** * Given a resource type and ID, determines the internal persistent ID for the resource. * Optionally filters out deleted resources. 
@@ -273,11 +440,11 @@ public class IdHelperService implements IIdHelperService { @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId, - boolean theExcludeDeleted) { + ResolveIdentityMode theMode) { Validate.notNull(theId, "theId must not be null"); Map retVal = resolveResourcePersistentIds( - theRequestPartitionId, theResourceType, Collections.singletonList(theId), theExcludeDeleted); + theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode); return retVal.get(theId); // should be only one } @@ -359,11 +526,11 @@ public class IdHelperService implements IIdHelperService { idsToCheck.add(nextId); } - new QueryChunker() - .chunk( - idsToCheck, - SearchBuilder.getMaximumPageSize() / 2, - ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal)); + new QueryChunker(); + TaskChunker.chunk( + idsToCheck, + SearchBuilder.getMaximumPageSize() / 2, + ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal)); } return retVal; @@ -430,18 +597,30 @@ public class IdHelperService implements IIdHelperService { RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root from) { if (myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()) { return Optional.empty(); - } else if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) { - Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue")); - return Optional.of(partitionIdCriteria); - } else if (!theRequestPartitionId.isAllPartitions()) { + } else if (theRequestPartitionId.isAllPartitions()) { + return Optional.empty(); + } else { List partitionIds = theRequestPartitionId.getPartitionIds(); partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds); - if (partitionIds.size() > 1) { - Predicate partitionIdCriteria = from.get("myPartitionIdValue").in(partitionIds); - return Optional.of(partitionIdCriteria); - } else if (partitionIds.size() == 1) { - Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0)); - return Optional.of(partitionIdCriteria); + if (partitionIds.contains(null)) { + Predicate partitionIdNullCriteria = + from.get("myPartitionIdValue").isNull(); + if (partitionIds.size() == 1) { + return Optional.of(partitionIdNullCriteria); + } else { + Predicate partitionIdCriteria = from.get("myPartitionIdValue") + .in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList())); + return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria)); + } + } else { + if (partitionIds.size() > 1) { + Predicate partitionIdCriteria = + from.get("myPartitionIdValue").in(partitionIds); + return Optional.of(partitionIdCriteria); + } else if (partitionIds.size() == 1) { + Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0)); + return Optional.of(partitionIdCriteria); + } } } return Optional.empty(); @@ -475,6 +654,7 @@ public class IdHelperService implements IIdHelperService { return retVal; } + @SuppressWarnings("OptionalAssignedToNull") @Override public Optional translatePidIdToForcedIdWithCache(JpaPid theId) { // do getIfPresent and then put to avoid doing I/O inside the cache. 
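The reworked partition predicate above is easiest to read in terms of the SQL shapes it produces. Below is a self-contained, plain-Java sketch of that branching, simplified for illustration: it ignores the cross-partition-reference and all-partitions early returns and the default-partition-ID substitution, and a null entry stands for the default partition.

```java
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

class PartitionPredicateSketch {

	// Returns the WHERE-clause shape the criteria predicate takes for a given
	// list of partition IDs, mirroring the branching in getOptionalPartitionPredicate().
	static String describe(List<Integer> partitionIds) {
		String nonNull = partitionIds.stream()
				.filter(Objects::nonNull)
				.map(String::valueOf)
				.collect(Collectors.joining(", "));
		if (partitionIds.contains(null)) {
			if (partitionIds.size() == 1) {
				return "PARTITION_ID IS NULL";
			}
			return "(PARTITION_ID IN (" + nonNull + ")) OR (PARTITION_ID IS NULL)";
		}
		if (partitionIds.size() > 1) {
			return "PARTITION_ID IN (" + nonNull + ")";
		}
		return "PARTITION_ID = " + partitionIds.get(0);
	}

	public static void main(String[] args) {
		System.out.println(describe(Arrays.asList((Integer) null))); // PARTITION_ID IS NULL
		System.out.println(describe(Arrays.asList(null, 1, 2)));     // (PARTITION_ID IN (1, 2)) OR (PARTITION_ID IS NULL)
		System.out.println(describe(Arrays.asList(1, 2)));           // PARTITION_ID IN (1, 2)
		System.out.println(describe(Arrays.asList(1)));               // PARTITION_ID = 1
	}
}
```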
@@ -492,112 +672,6 @@ public class IdHelperService implements IIdHelperService { return forcedId; } - private ListMultimap organizeIdsByResourceType(Collection theIds) { - ListMultimap typeToIds = - MultimapBuilder.hashKeys().arrayListValues().build(); - for (IIdType nextId : theIds) { - if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY - || !isValidPid(nextId)) { - if (nextId.hasResourceType()) { - typeToIds.put(nextId.getResourceType(), nextId.getIdPart()); - } else { - typeToIds.put("", nextId.getIdPart()); - } - } - } - return typeToIds; - } - - private Map>> translateForcedIdToPids( - @Nonnull RequestPartitionId theRequestPartitionId, Collection theId, boolean theExcludeDeleted) { - theId.forEach(id -> Validate.isTrue(id.hasIdPart())); - - if (theId.isEmpty()) { - return new HashMap<>(); - } - - Map>> retVal = new HashMap<>(); - RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId); - - if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) { - List pids = theId.stream() - .filter(t -> isValidPid(t)) - .map(IIdType::getIdPartAsLong) - .collect(Collectors.toList()); - if (!pids.isEmpty()) { - resolvePids(requestPartitionId, pids, retVal); - } - } - - // returns a map of resourcetype->id - ListMultimap typeToIds = organizeIdsByResourceType(theId); - for (Map.Entry> nextEntry : typeToIds.asMap().entrySet()) { - String nextResourceType = nextEntry.getKey(); - Collection nextIds = nextEntry.getValue(); - - if (!myStorageSettings.isDeleteEnabled()) { - for (Iterator forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) { - String nextForcedId = forcedIdIterator.next(); - String nextKey = nextResourceType + "/" + nextForcedId; - IResourceLookup cachedLookup = - myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); - if (cachedLookup != null) { - forcedIdIterator.remove(); - retVal.computeIfAbsent(nextForcedId, id -> new ArrayList<>()) - .add(cachedLookup); - } - } - } - - if (!nextIds.isEmpty()) { - Collection views; - assert isNotBlank(nextResourceType); - - if (requestPartitionId.isAllPartitions()) { - views = myResourceTableDao.findAndResolveByForcedIdWithNoType( - nextResourceType, nextIds, theExcludeDeleted); - } else { - if (requestPartitionId.isDefaultPartition()) { - views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionNull( - nextResourceType, nextIds, theExcludeDeleted); - } else if (requestPartitionId.hasDefaultPartitionId()) { - views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId( - nextResourceType, - nextIds, - requestPartitionId.getPartitionIdsWithoutDefault(), - theExcludeDeleted); - } else { - views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartition( - nextResourceType, nextIds, requestPartitionId.getPartitionIds(), theExcludeDeleted); - } - } - - for (Object[] next : views) { - String resourceType = (String) next[0]; - Long resourcePid = (Long) next[1]; - String forcedId = (String) next[2]; - Date deletedAt = (Date) next[3]; - Integer partitionId = (Integer) next[4]; - LocalDate partitionDate = (LocalDate) next[5]; - - JpaResourceLookup lookup = new JpaResourceLookup( - resourceType, - resourcePid, - deletedAt, - PartitionablePartitionId.with(partitionId, partitionDate)); - retVal.computeIfAbsent(forcedId, id -> new ArrayList<>()).add(lookup); - - if (!myStorageSettings.isDeleteEnabled()) { - String key = resourceType + "/" + 
forcedId; - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, key, lookup); - } - } - } - } - - return retVal; - } - public RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) { if (myPartitionSettings.getDefaultPartitionId() != null) { if (!theRequestPartitionId.isAllPartitions() && theRequestPartitionId.hasDefaultPartitionId()) { @@ -610,59 +684,6 @@ public class IdHelperService implements IIdHelperService { return theRequestPartitionId; } - private void resolvePids( - @Nonnull RequestPartitionId theRequestPartitionId, - List thePidsToResolve, - Map>> theTargets) { - if (!myStorageSettings.isDeleteEnabled()) { - for (Iterator forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) { - Long nextPid = forcedIdIterator.next(); - String nextKey = Long.toString(nextPid); - IResourceLookup cachedLookup = - myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey); - if (cachedLookup != null) { - forcedIdIterator.remove(); - theTargets.computeIfAbsent(nextKey, id -> new ArrayList<>()).add(cachedLookup); - } - } - } - - if (!thePidsToResolve.isEmpty()) { - Collection lookup; - if (theRequestPartitionId.isAllPartitions()) { - lookup = myResourceTableDao.findLookupFieldsByResourcePid(thePidsToResolve); - } else { - if (theRequestPartitionId.isDefaultPartition()) { - lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionNull(thePidsToResolve); - } else if (theRequestPartitionId.hasDefaultPartitionId()) { - lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIdsOrNullPartition( - thePidsToResolve, theRequestPartitionId.getPartitionIdsWithoutDefault()); - } else { - lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIds( - thePidsToResolve, theRequestPartitionId.getPartitionIds()); - } - } - lookup.stream() - .map(t -> new JpaResourceLookup( - (String) t[0], - (Long) t[1], - (Date) t[2], - PartitionablePartitionId.with((Integer) t[3], (LocalDate) t[4]))) - .forEach(t -> { - String id = t.getPersistentId().toString(); - if (!theTargets.containsKey(id)) { - theTargets.put(id, new ArrayList<>()); - } - theTargets.get(id).add(t); - if (!myStorageSettings.isDeleteEnabled()) { - String nextKey = t.getPersistentId().toString(); - myMemoryCacheService.putAfterCommit( - MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t); - } - }); - } - } - @Override public PersistentIdToForcedIdMap translatePidsToForcedIds(Set theResourceIds) { assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive(); @@ -673,7 +694,7 @@ public class IdHelperService implements IIdHelperService { List remainingPids = thePids.stream().filter(t -> !retVal.containsKey(t)).collect(Collectors.toList()); - new QueryChunker().chunk(remainingPids, t -> { + QueryChunker.chunk(remainingPids, t -> { List resourceEntities = myResourceTableDao.findAllById(t); for (ResourceTable nextResourceEntity : resourceEntities) { @@ -701,33 +722,39 @@ public class IdHelperService implements IIdHelperService { * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods */ @Override - public void addResolvedPidToForcedId( - JpaPid theJpaPid, + public void addResolvedPidToFhirId( + @Nonnull JpaPid theJpaPid, @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - @Nullable String theForcedId, + @Nonnull String theResourceType, + @Nonnull String theFhirId, @Nullable Date 
theDeletedAt) { - if (theForcedId != null) { - if (theJpaPid.getAssociatedResourceId() == null) { - populateAssociatedResourceId(theResourceType, theForcedId, theJpaPid); - } - - myMemoryCacheService.putAfterCommit( - MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, - theJpaPid.getId(), - Optional.of(theResourceType + "/" + theForcedId)); - String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid); - } else { - myMemoryCacheService.putAfterCommit( - MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty()); + if (theJpaPid.getAssociatedResourceId() == null) { + populateAssociatedResourceId(theResourceType, theFhirId, theJpaPid); } - if (!myStorageSettings.isDeleteEnabled()) { - JpaResourceLookup lookup = new JpaResourceLookup( - theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId()); - String nextKey = theJpaPid.toString(); - myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, + theJpaPid.getId(), + Optional.of(theResourceType + "/" + theFhirId)); + String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId); + myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid); + + JpaResourceLookup lookup = new JpaResourceLookup( + theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId()); + + MemoryCacheService.ForcedIdCacheKey fhirIdKey = + new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKey, List.of(lookup)); + + // If it's a pure-numeric ID, store it in the cache without a type as well + // so that we can resolve it this way when loading entities for update + if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC + && isValidLong(theFhirId)) { + MemoryCacheService.ForcedIdCacheKey fhirIdKeyWithoutType = + new MemoryCacheService.ForcedIdCacheKey(null, theFhirId, theRequestPartitionId); + myMemoryCacheService.putAfterCommit( + MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKeyWithoutType, List.of(lookup)); } } @@ -736,19 +763,6 @@ public class IdHelperService implements IIdHelperService { myPartitionSettings = thePartitionSettings; } - public static boolean isValidPid(IIdType theId) { - if (theId == null) { - return false; - } - - String idPart = theId.getIdPart(); - return isValidPid(idPart); - } - - public static boolean isValidPid(String theIdPart) { - return StringUtils.isNumeric(theIdPart); - } - @Override @Nonnull public List getPidsOrThrowException( @@ -764,7 +778,11 @@ public class IdHelperService implements IIdHelperService { if (resourceId == null) { IIdType id = theResource.getIdElement(); try { - retVal = resolveResourcePersistentIds(theRequestPartitionId, id.getResourceType(), id.getIdPart()); + retVal = resolveResourceIdentityPid( + theRequestPartitionId, + id.getResourceType(), + id.getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); } catch (ResourceNotFoundException e) { retVal = null; } @@ -836,4 +854,23 @@ public class IdHelperService implements IIdHelperService { public JpaPid newPidFromStringIdAndResourceName(String thePid, String 
theResourceName) { return JpaPid.fromIdAndResourceType(Long.parseLong(thePid), theResourceName); } + + private IIdType newIdType(String theValue) { + IIdType retVal = myFhirCtx.getVersion().newIdType(); + retVal.setValue(theValue); + return retVal; + } + + public static boolean isValidPid(IIdType theId) { + if (theId == null) { + return false; + } + + String idPart = theId.getIdPart(); + return isValidPid(idPart); + } + + public static boolean isValidPid(String theIdPart) { + return StringUtils.isNumeric(theIdPart); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java index 60b14fa3e0c..a967259e176 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermValueSet.java @@ -267,13 +267,14 @@ public class TermValueSet implements Serializable { return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) .append("id", myId) .append("url", myUrl) - .append(myResource != null ? ("resource=" + myResource.toString()) : ("resource=(null)")) + .append("version", myVersion) .append("resourcePid", myResourcePid) .append("name", myName) - .append(myConcepts != null ? ("concepts - size=" + myConcepts.size()) : ("concepts=(null)")) + .append(myConcepts != null ? ("conceptCount=" + myConcepts.size()) : ("concepts=(null)")) .append("totalConcepts", myTotalConcepts) .append("totalConceptDesignations", myTotalConceptDesignations) .append("expansionStatus", myExpansionStatus) + .append(myResource != null ? ("resId=" + myResource) : ("resource=(null)")) .toString(); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java index 891617aa007..d20f61a7c6b 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/model/cross/JpaResourceLookup.java @@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.model.cross; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; +import org.apache.commons.lang3.builder.ToStringBuilder; +import org.apache.commons.lang3.builder.ToStringStyle; import java.util.Date; @@ -59,4 +61,14 @@ public class JpaResourceLookup implements IResourceLookup { return jpaPid; } + + @Override + public String toString() { + return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE) + .append("resType", myResourceType) + .append("resPid", myResourcePid) + .append("deletedAt", myDeletedAt) + .append("partId", myPartitionablePartitionId) + .toString(); + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index a7e9ebc32f9..acd8fe0e5a3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import 
ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.dao.BaseStorageDao; @@ -49,6 +50,7 @@ import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; import ca.uhn.fhir.jpa.entity.ResourceSearchView; import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; @@ -84,6 +86,7 @@ import ca.uhn.fhir.rest.api.SortOrderEnum; import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; import ca.uhn.fhir.rest.api.server.RequestDetails; +import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import ca.uhn.fhir.rest.param.BaseParamWithPrefix; import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.DateRangeParam; @@ -117,6 +120,7 @@ import org.apache.commons.lang3.math.NumberUtils; import org.apache.commons.lang3.tuple.Pair; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -533,10 +537,8 @@ public class SearchBuilder implements ISearchBuilder { throw new InvalidRequestException(Msg.code(2027) + "LastN operation is not enabled on this service, can not process this request"); } - return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() - .map(lastNResourceId -> myIdHelperService.resolveResourcePersistentIds( - myRequestPartitionId, myResourceName, String.valueOf(lastNResourceId))) - .collect(Collectors.toList()); + List persistentIds = myFulltextSearchSvc.lastN(myParams, theMaximumResults); + return persistentIds.stream().map(t -> (JpaPid) t).collect(Collectors.toList()); } else { throw new InvalidRequestException( Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); @@ -557,7 +559,13 @@ public class SearchBuilder implements ISearchBuilder { idParamValue = idParm.getValue(); } - pid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, idParamValue); + pid = myIdHelperService + .resolveResourceIdentity( + myRequestPartitionId, + myResourceName, + idParamValue, + ResolveIdentityMode.includeDeleted().cacheOk()) + .getPersistentId(); } return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); } @@ -579,37 +587,46 @@ public class SearchBuilder implements ISearchBuilder { /** * Combs through the params for any _id parameters and extracts the PIDs for them - * - * @param theTargetPids */ private void extractTargetPidsFromIdParams(Set theTargetPids) { // get all the IQueryParameterType objects // for _id -> these should all be StringParam values - HashSet ids = new HashSet<>(); + HashSet ids = new HashSet<>(); List> params = myParams.get(IAnyResource.SP_RES_ID); for (List paramList : params) { for (IQueryParameterType param : paramList) { + String id; if (param instanceof StringParam) { // we expect all _id values to be StringParams - ids.add(((StringParam) param).getValue()); + id = ((StringParam) param).getValue(); } else if (param instanceof TokenParam) { - ids.add(((TokenParam) param).getValue()); + id = 
((TokenParam) param).getValue(); } else { // we do not expect the _id parameter to be a non-string value throw new IllegalArgumentException( Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); } + + IIdType idType = myContext.getVersion().newIdType(); + if (id.contains("/")) { + idType.setValue(id); + } else { + idType.setValue(myResourceName + "/" + id); + } + ids.add(idType); } } // fetch our target Pids // this will throw if an id is not found - Map idToPid = myIdHelperService.resolveResourcePersistentIds( - myRequestPartitionId, myResourceName, new ArrayList<>(ids)); + Map> idToIdentity = myIdHelperService.resolveResourceIdentities( + myRequestPartitionId, + new ArrayList<>(ids), + ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); // add the pids to targetPids - for (JpaPid pid : idToPid.values()) { - theTargetPids.add(pid.getId()); + for (IResourceLookup pid : idToIdentity.values()) { + theTargetPids.add((Long) pid.getPersistentId().getId()); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java index 2c2c5bed160..b58b0acf2ab 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/ResourceIdPredicateBuilder.java @@ -21,24 +21,27 @@ package ca.uhn.fhir.jpa.search.builder.predicate; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.util.QueryParameterUtils; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParamModifier; -import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import com.healthmarketscience.sqlbuilder.Condition; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; import jakarta.annotation.Nullable; -import org.hl7.fhir.r4.model.IdType; +import org.hl7.fhir.instance.model.api.IIdType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import static org.apache.commons.lang3.ObjectUtils.defaultIfNull; @@ -68,9 +71,8 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { Set allOrPids = null; SearchFilterParser.CompareOperation defaultOperation = SearchFilterParser.CompareOperation.eq; - boolean allIdsAreForcedIds = true; for (List nextValue : theValues) { - Set orPids = new HashSet<>(); + Set ids = new LinkedHashSet<>(); boolean haveValue = false; for (IQueryParameterType next : nextValue) { String value = next.getValueAsQueryToken(getFhirContext()); @@ -78,21 +80,14 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { value = value.substring(1); } - IdType valueAsId = new IdType(value); if (isNotBlank(value)) { - if (!myIdHelperService.idRequiresForcedId(valueAsId.getIdPart()) && allIdsAreForcedIds) { - allIdsAreForcedIds = 
false; - } haveValue = true; - try { - boolean excludeDeleted = true; - JpaPid pid = myIdHelperService.resolveResourcePersistentIds( - theRequestPartitionId, theResourceName, valueAsId.getIdPart(), excludeDeleted); - orPids.add(pid); - } catch (ResourceNotFoundException e) { - // This is not an error in a search, it just results in no matches - ourLog.debug("Resource ID {} was requested but does not exist", valueAsId.getIdPart()); + if (!value.contains("/")) { + value = theResourceName + "/" + value; } + IIdType id = getFhirContext().getVersion().newIdType(); + id.setValue(value); + ids.add(id); } if (next instanceof TokenParam) { @@ -101,6 +96,20 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { } } } + + Set orPids = new HashSet<>(); + + // We're joining this to a query that will explicitly ask for non-deleted, + // so we really only want the PID and can safely cache (even if a previously + // deleted status was cached, since it might now be undeleted) + Map> resolvedPids = myIdHelperService.resolveResourceIdentities( + theRequestPartitionId, + ids, + ResolveIdentityMode.includeDeleted().cacheOk()); + for (IResourceLookup lookup : resolvedPids.values()) { + orPids.add(lookup.getPersistentId()); + } + if (haveValue) { if (allOrPids == null) { allOrPids = orPids; @@ -122,17 +131,19 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder { List resourceIds = JpaPid.toLongList(allOrPids); if (theSourceJoinColumn == null) { - BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(!allIdsAreForcedIds); + BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true); Condition predicate; switch (operation) { default: case eq: predicate = queryRootTable.createPredicateResourceIds(false, resourceIds); - return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); + break; case ne: predicate = queryRootTable.createPredicateResourceIds(true, resourceIds); - return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); + break; } + predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate); + return predicate; } else { DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn); return QueryParameterUtils.toEqualToOrInPredicate( diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index 433f3f5d01c..f1024fac50a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; @@ -382,8 +383,11 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { // Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource, theRequest); - JpaPid codeSystemResourcePid = 
myIdHelperService.resolveResourcePersistentIds( - RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart()); + JpaPid codeSystemResourcePid = myIdHelperService.resolveResourceIdentityPid( + RequestPartitionId.allPartitions(), + csId.getResourceType(), + csId.getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getId()); ourLog.info("CodeSystem resource has ID: {}", csId.getValue()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java index e9f07b798a5..8cd06745910 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermReadSvcImpl.java @@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider; import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider; @@ -470,7 +471,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { myCodeSystemCurrentVersionCache.invalidateAll(); } - public void deleteValueSetForResource(ResourceTable theResourceTable) { + public Optional deleteValueSetForResource(ResourceTable theResourceTable) { // Get existing entity so it can be deleted. Optional optionalExistingTermValueSetById = myTermValueSetDao.findByResourcePid(theResourceTable.getId()); @@ -481,8 +482,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId()); deletePreCalculatedValueSetContents(existingTermValueSet); myTermValueSetDao.deleteById(existingTermValueSet.getId()); + + /* + * If we're updating an existing ValueSet within a transaction, we need to make + * sure to manually flush now since otherwise we'll try to create a new + * TermValueSet entity and fail with a constraint error on the URL, since + * this one won't be deleted yet + */ + myTermValueSetDao.flush(); + ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId()); } + + return optionalExistingTermValueSetById; } private void deletePreCalculatedValueSetContents(TermValueSet theValueSet) { @@ -2081,10 +2093,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) { - return myIdHelperService.resolveResourcePersistentIds( + return myIdHelperService.resolveResourceIdentityPid( RequestPartitionId.allPartitions(), theValueSet.getIdElement().getResourceType(), - theValueSet.getIdElement().getIdPart()); + theValueSet.getIdElement().getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); } protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet( @@ -2527,6 +2540,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { @Override @Transactional public void storeTermValueSet(ResourceTable theResourceTable, ValueSet theValueSet) { + // If we're in a transaction, we need to flush now so that we can correctly detect + // duplicates if there are multiple 
ValueSets in the same TX with the same URL + // (which is an error, but we need to catch it). It'd be better to catch this by + // inspecting the URLs in the bundle or something, since flushing hurts performance + // but it's not expected that loading valuesets is going to be a huge high frequency + // thing so it probably doesn't matter + myEntityManager.flush(); ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied"); if (isPlaceholder(theValueSet)) { @@ -2552,7 +2572,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { termValueSet.setName(theValueSet.hasName() ? theValueSet.getName() : null); // Delete version being replaced - deleteValueSetForResource(theResourceTable); + Optional deletedTrmValueSet = deleteValueSetForResource(theResourceTable); /* * Do the upload. @@ -2560,11 +2580,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { String url = termValueSet.getUrl(); String version = termValueSet.getVersion(); Optional optionalExistingTermValueSetByUrl; - if (version != null) { - optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version); + + if (deletedTrmValueSet.isPresent() + && Objects.equals(deletedTrmValueSet.get().getUrl(), url) + && Objects.equals(deletedTrmValueSet.get().getVersion(), version)) { + // If we just deleted the valueset marker, we don't need to check if it exists + // in the database + optionalExistingTermValueSetByUrl = Optional.empty(); } else { - optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url); + optionalExistingTermValueSetByUrl = getTermValueSet(version, url); } + if (optionalExistingTermValueSetByUrl.isEmpty()) { myTermValueSetDao.save(termValueSet); @@ -2602,6 +2628,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs { } } + private Optional getTermValueSet(String version, String url) { + Optional optionalExistingTermValueSetByUrl; + if (version != null) { + optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version); + } else { + optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url); + } + return optionalExistingTermValueSetByUrl; + } + @Override @Transactional public IFhirResourceDaoCodeSystem.SubsumesResult subsumes( diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java index 2d19dbd3e80..634fe7e522a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/QueryChunker.java @@ -35,11 +35,11 @@ import java.util.stream.Stream; */ public class QueryChunker extends TaskChunker { - public void chunk(Collection theInput, Consumer> theBatchConsumer) { + public static void chunk(Collection theInput, Consumer> theBatchConsumer) { chunk(theInput, SearchBuilder.getMaximumPageSize(), theBatchConsumer); } - public Stream> chunk(Stream theStream) { + public static Stream> chunk(Stream theStream) { return chunk(theStream, SearchBuilder.getMaximumPageSize()); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java index a96a38325b3..9036b351453 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java @@ -3,36 +3,39 @@ package ca.uhn.fhir.jpa.dao.index; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; -import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import jakarta.persistence.EntityManager; -import jakarta.persistence.Tuple; -import jakarta.persistence.criteria.Path; -import jakarta.persistence.criteria.Root; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaQuery; +import org.hibernate.sql.results.internal.TupleImpl; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentMatchers; +import org.mockito.Answers; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.junit.jupiter.MockitoExtension; +import java.util.Date; import java.util.List; import java.util.Map; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -40,7 +43,6 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) public class IdHelperServiceTest { @@ -60,29 +62,33 @@ public class IdHelperServiceTest { @Mock private MemoryCacheService myMemoryCacheService; - @Mock + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private EntityManager myEntityManager; @Mock private PartitionSettings myPartitionSettings; - @BeforeEach + @Mock + private TypedQuery myTypedQuery; + + @BeforeEach void setUp() { myHelperSvc.setDontCheckActiveTransactionForUnitTest(true); // lenient because some tests require this setup, and others do not lenient().doReturn(true).when(myStorageSettings).isDeleteEnabled(); - lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy(); } @Test public void testResolveResourcePersistentIds() { + lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy(); + //prepare params RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A"); String resourceType = "Patient"; Long id = 123L; List ids = 
List.of(String.valueOf(id)); - boolean theExcludeDeleted = false; + ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled(); //prepare results Patient expectedPatient = new Patient(); @@ -91,19 +97,21 @@ public class IdHelperServiceTest { // configure mock behaviour when(myStorageSettings.isDeleteEnabled()).thenReturn(true); - final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted)); + final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode)); assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage()); } @Test public void testResolveResourcePersistentIdsDeleteFalse() { + lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy(); + //prepare Params RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A"); Long id = 123L; String resourceType = "Patient"; List ids = List.of(String.valueOf(id)); String forcedId = "(all)/" + resourceType + "/" + id; - boolean theExcludeDeleted = false; + ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled(); //prepare results Patient expectedPatient = new Patient(); @@ -112,7 +120,7 @@ public class IdHelperServiceTest { // configure mock behaviour when(myStorageSettings.isDeleteEnabled()).thenReturn(false); - Map actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted); + Map actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode); //verifyResult assertFalse(actualIds.isEmpty()); @@ -120,20 +128,35 @@ public class IdHelperServiceTest { } - private Root getMockedFrom() { - @SuppressWarnings("unchecked") - Path path = mock(Path.class); - @SuppressWarnings("unchecked") - Root from = mock(Root.class); - when(from.get(ArgumentMatchers.any())).thenReturn(path); - return from; - } - private List getMockedTupleList(Long idNumber, String resourceType, String id) { - Tuple tuple = mock(Tuple.class); - when(tuple.get(eq(0), eq(Long.class))).thenReturn(idNumber); - when(tuple.get(eq(1), eq(String.class))).thenReturn(resourceType); - when(tuple.get(eq(2), eq(String.class))).thenReturn(id); - return List.of(tuple); - } + + + @Test + public void testResolveResourceIdentity_defaultFunctionality(){ + lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC).when(myStorageSettings).getResourceClientIdStrategy(); + + RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition"); + String resourceType = "Patient"; + String resourceForcedId = "AAA"; + + Object[] tuple = new Object[] { + 1L, + "Patient", + "AAA", + new Date(), + null + }; + + when(myEntityManager.createQuery(any(CriteriaQuery.class))).thenReturn(myTypedQuery); + when(myTypedQuery.getResultList()).thenReturn(List.of( + new TupleImpl(null, tuple) + )); + + IResourceLookup result = myHelperSvc.resolveResourceIdentity(partitionId, resourceType, resourceForcedId, ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled()); + assertEquals(tuple[0], result.getPersistentId().getId()); + assertEquals(tuple[1], result.getResourceType()); + assertEquals(tuple[3], 
result.getDeleted()); + } + + } diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml index 8a3867a290a..02632b55cbd 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseR4SearchLastN.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseR4SearchLastN.java index ac65dc7307c..5e54957de31 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseR4SearchLastN.java +++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseR4SearchLastN.java @@ -72,7 +72,7 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest { protected static IIdType patient2Id = null; // Using static variables including the flag below so that we can initalize the database and indexes once // (all of the tests only read from the DB and indexes and so no need to re-initialze them for each test). - private static Calendar observationDate = new GregorianCalendar(); + private static final Calendar observationDate = new GregorianCalendar(); protected final String observationCd0 = "code0"; protected final String observationCd1 = "code1"; protected final String observationCd2 = "code2"; @@ -120,9 +120,10 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest { // enabled to also create extended lucene index during creation of test data boolean hsearchSaved = myStorageSettings.isAdvancedHSearchIndexing(); myStorageSettings.setAdvancedHSearchIndexing(true); + myStorageSettings.setDeleteEnabled(false); // Using a static flag to ensure that test data and elasticsearch index is only created once. - // Creating this data and the index is time consuming and as such want to avoid having to repeat for each test. + // Creating this data and the index is time-consuming and as such want to avoid having to repeat for each test. // Normally would use a static @BeforeClass method for this purpose, but Autowired objects cannot be accessed in static methods. Patient pt = new Patient(); diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNAsyncIT.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNAsyncIT.java index 05efac05a17..5a96c844150 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNAsyncIT.java +++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNAsyncIT.java @@ -120,9 +120,10 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN { ourLog.info("Queries:\n * " + String.join("\n * ", queries)); + // 1 query to resolve the subject PIDs // 3 queries to actually perform the search // 1 query to lookup up Search from cache, and 2 chunked queries to retrieve resources by PID. 
- assertThat(queries).hasSize(6); + assertThat(queries).hasSize(7); // The first chunked query should have a full complement of PIDs StringBuilder firstQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'"); @@ -130,7 +131,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN { firstQueryPattern.append(",'[0-9]+'"); } firstQueryPattern.append("\\).*"); - assertThat(queries.get(4)).matches(firstQueryPattern.toString()); + assertThat(queries.get(5)).matches(firstQueryPattern.toString()); // the second chunked query should be padded with "-1". StringBuilder secondQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'"); @@ -141,7 +142,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN { secondQueryPattern.append(",'-1'"); } secondQueryPattern.append("\\).*"); - assertThat(queries.get(5)).matches(secondQueryPattern.toString()); + assertThat(queries.get(6)).matches(secondQueryPattern.toString()); } diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java index c17a3aa742c..4c204554050 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java +++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java @@ -79,6 +79,10 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN { // Set chunk size to 50 SearchBuilder.setMaxPageSizeForTest(50); + // Run once to fill caches + toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null)); + + // Actually test myCaptureQueriesListener.clear(); List results = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null)); assertThat(results).hasSize(75); diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java index 2bfc2ae1b2a..6ddd7fb80ee 100644 --- a/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java +++ b/hapi-fhir-jpaserver-elastic-test-utilities/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4ElasticsearchIT.java @@ -277,8 +277,7 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(codeSystem, codeSystemVersion, new SystemRequestDetails(), Collections.singletonList(valueSet), Collections.emptyList()); -// myTerminologyDeferredStorageSvc.saveAllDeferred(); - await().atMost(10, SECONDS).until(() -> { + await().atMost(20, SECONDS).until(() -> { myTerminologyDeferredStorageSvc.saveDeferred(); return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true); }); diff --git a/hapi-fhir-jpaserver-hfql/pom.xml b/hapi-fhir-jpaserver-hfql/pom.xml index 05150147500..7325bd0e32f 100644 --- a/hapi-fhir-jpaserver-hfql/pom.xml +++ b/hapi-fhir-jpaserver-hfql/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml index 9dadd7a729e..8b8d77344cc 100644 --- a/hapi-fhir-jpaserver-ips/pom.xml +++ 
b/hapi-fhir-jpaserver-ips/pom.xml @@ -3,7 +3,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 56809334162..f07832756c1 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java index f1c4b2946f2..7362adc18f0 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java @@ -53,7 +53,7 @@ import java.util.Date; import java.util.List; import java.util.Optional; -public class MdmLinkDaoSvc

<P extends IResourcePersistentId, M extends IMdmLink<P>> { +public class MdmLinkDaoSvc<P extends IResourcePersistentId<?>, M extends IMdmLink<P>
> { private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java index 690d2cc061d..5224dc86320 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/GoldenResourceMergerSvcImpl.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc; import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc; import ca.uhn.fhir.mdm.api.IMdmLink; @@ -235,10 +236,11 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc { List toLinks = myMdmLinkDaoSvc.findMdmLinksByGoldenResource(theToResource); List toDelete = new ArrayList<>(); - IResourcePersistentId goldenResourcePid = myIdHelperService.resolveResourcePersistentIds( + IResourcePersistentId goldenResourcePid = myIdHelperService.resolveResourceIdentityPid( getPartitionIdForResource(theToResource), theToResource.getIdElement().getResourceType(), - theToResource.getIdElement().getIdPart()); + theToResource.getIdElement().getIdPart(), + ResolveIdentityMode.includeDeleted().cacheOk()); // reassign links: // to <- from diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java index ecb2b521931..7d95124727e 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/candidate/FindCandidateByExampleSvc.java @@ -44,7 +44,7 @@ import java.util.Set; import java.util.stream.Collectors; @Service -public class FindCandidateByExampleSvc

<P extends IResourcePersistentId> extends BaseCandidateFinder { +public class FindCandidateByExampleSvc<P extends IResourcePersistentId<?>
> extends BaseCandidateFinder { private static final Logger ourLog = Logs.getMdmTroubleshootingLog(); @Autowired diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java index fa3a282c402..e37946f7cbc 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmSurvivorshipSvcImplTest.java @@ -7,7 +7,10 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; +import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup; import ca.uhn.fhir.jpa.model.dao.JpaPid; +import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId; import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc; import ca.uhn.fhir.mdm.api.IMdmSettings; import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum; @@ -24,6 +27,7 @@ import ca.uhn.fhir.mdm.util.MdmResourceUtil; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.extension.ExtendWith; @@ -127,7 +131,7 @@ public class MdmSurvivorshipSvcImplTest { List resources = new ArrayList<>(); List links = new ArrayList<>(); - Map sourceIdToPid = new HashMap<>(); + Map> sourceIdToPid = new HashMap<>(); for (int i = 0; i < 10; i++) { // we want our resources to be slightly different Patient patient = new Patient(); @@ -152,7 +156,7 @@ public class MdmSurvivorshipSvcImplTest { link.setSourceId(patient.getId()); link.setGoldenResourceId(goldenPatient.getId()); links.add(link); - sourceIdToPid.put(patient.getId(), link.getSourcePid()); + sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", (Long) link.getSourcePid().getId(), null, new PartitionablePartitionId())); } IFhirResourceDao resourceDao = mock(IFhirResourceDao.class); @@ -176,7 +180,7 @@ public class MdmSurvivorshipSvcImplTest { when(myMdmSettings.getMdmRules()) .thenReturn(new MdmRulesJson()); doReturn(sourceIdToPid).when(myIIdHelperService) - .resolveResourcePersistentIds(any(RequestPartitionId.class), anyString(), any(List.class)); + .resolveResourceIdentities(any(RequestPartitionId.class), any(List.class), any()); // we will return a non-empty list to reduce mocking when(myEIDHelper.getExternalEid(any())) .thenReturn(Collections.singletonList(new CanonicalEID("example", "value", "use"))); @@ -199,14 +203,14 @@ public class MdmSurvivorshipSvcImplTest { verify(resourceDao) .update(eq(goldenPatientRebuilt), any(RequestDetails.class)); - ArgumentCaptor> idsCaptor = ArgumentCaptor.forClass(List.class); - verify(myIIdHelperService).resolveResourcePersistentIds(any(RequestPartitionId.class), anyString(), idsCaptor.capture()); + ArgumentCaptor> idsCaptor = ArgumentCaptor.forClass(List.class); + verify(myIIdHelperService).resolveResourceIdentities(any(RequestPartitionId.class), idsCaptor.capture(), any()); assertNotNull(idsCaptor.getValue()); assertFalse(idsCaptor.getValue().isEmpty()); - for (String id : idsCaptor.getValue()) { - assertFalse(id.contains("/")); - assertFalse(id.contains("Patient")); - } + 
List ids = idsCaptor.getValue().stream().map(IIdType::getValue).toList(); + + List expectedIds = resources.stream().map(t -> t.getIdElement().toUnqualifiedVersionless().getValue()).toList(); + assertThat(ids).asList().containsExactlyInAnyOrder(expectedIds.toArray()); } private MdmTransactionContext createTransactionContext() { diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearStepTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearStepTest.java index 0ee2a64191a..6d84f4d7063 100644 --- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearStepTest.java +++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/mdm/batch2/clear/MdmClearStepTest.java @@ -103,7 +103,7 @@ class MdmClearStepTest extends BaseMdmR4Test { assertPatientExists(myGoldenId); fail("Resource cannot be found"); } catch (ResourceNotFoundException e) { - assertEquals("HAPI-2001: Resource " + myGoldenId + " is not known", e.getMessage()); + assertEquals("HAPI-1996: Resource " + myGoldenId + " is not known", e.getMessage()); } } diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index a28c594a06e..4c77735cfce 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java index d88cb054541..fb3534f299e 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java @@ -334,6 +334,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl } public void setTransientForcedId(String theTransientForcedId) { + assert theTransientForcedId == null || !theTransientForcedId.contains("/") + : "Invalid FHIR ID: " + theTransientForcedId; myTransientForcedId = theTransientForcedId; } } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java index 6163011eeb4..7f1779c3a8f 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java @@ -383,9 +383,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @Transient private transient ResourceHistoryTable myCurrentVersionEntity; - @Transient - private transient ResourceHistoryTable myNewVersionEntity; - @Transient private transient boolean myVersionUpdatedInCurrentTransaction; @@ -836,16 +833,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * multiple {@link ResourceHistoryTable} entities will result in a constraint error. 
*/ public ResourceHistoryTable toHistory(boolean theCreateVersionTags) { - boolean createVersionTags = theCreateVersionTags; - ResourceHistoryTable retVal = myNewVersionEntity; - if (retVal == null) { - retVal = new ResourceHistoryTable(); - myNewVersionEntity = retVal; - } else { - // Tags should already be set - createVersionTags = false; - } + ResourceHistoryTable retVal = new ResourceHistoryTable(); retVal.setResourceId(myId); retVal.setResourceType(myResourceType); @@ -855,12 +844,20 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas retVal.setPartitionId(getPartitionId()); retVal.setHasTags(isHasTags()); - if (isHasTags() && createVersionTags) { + if (isHasTags() && theCreateVersionTags) { for (ResourceTag next : getTags()) { retVal.addTag(next); } } + // If we've deleted and updated the same resource in the same transaction, + // we need to actually create 2 distinct versions + if (getCurrentVersionEntity() != null + && getCurrentVersionEntity().getId() != null + && getVersion() == getCurrentVersionEntity().getVersion()) { + myVersion++; + } + populateHistoryEntityVersionAndDates(retVal); return retVal; diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index 316d3495cf9..3b5fa244225 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml index d4bc53818ec..4f1146e7b01 100644 --- a/hapi-fhir-jpaserver-subscription/pom.xml +++ b/hapi-fhir-jpaserver-subscription/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml index 5c2635fdfaa..c9701f08e4b 100644 --- a/hapi-fhir-jpaserver-test-dstu2/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java index 324432a052f..0cd3edb41a2 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2Test.java @@ -1,10 +1,5 @@ package ca.uhn.fhir.jpa.dao.dstu2; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.assertFalse; import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; @@ -84,6 +79,8 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import java.io.IOException; import java.util.ArrayList; @@ -95,7 +92,11 @@ import java.util.stream.Collectors; import 
static org.apache.commons.lang3.StringUtils.defaultString; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -439,37 +440,20 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test { } } - @Test - public void testCreateWithIllegalReference() { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testCreateWithIllegalReference_WrongTypeForId(boolean theClientAssignedId) { + IIdType id1; + Observation o1 = new Observation(); o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless(); - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReference(id1); - myPatientDao.create(p, mySrd); - fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage()); + if (theClientAssignedId) { + o1.setId("A"); + id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless(); + } else { + id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless(); } - try { - Patient p = new Patient(); - p.getManagingOrganization().setReference(new IdDt("Organization", id1.getIdPart())); - myPatientDao.create(p, mySrd); - fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage()); - } - - // Now with a forced ID - - o1 = new Observation(); - o1.setId("testCreateWithIllegalReference"); - o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless(); - try { Patient p = new Patient(); p.getManagingOrganization().setReference(id1); @@ -485,9 +469,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test { myPatientDao.create(p, mySrd); fail(""); } catch (InvalidRequestException e) { - assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage()); + assertEquals(Msg.code(1094) + "Resource Organization/" + id1.getIdPart() + " not found, specified in path: Patient.managingOrganization", e.getMessage()); } - } @Test @@ -677,8 +660,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test { } /** - * See #773 - */ + * See #773 + */ @Test public void testDeleteResourceWithOutboundDeletedResources() { myStorageSettings.setEnforceReferentialIntegrityOnDelete(false); @@ -1387,8 +1370,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test { } /** - * See #196 - */ + * See #196 + */ @Test public void testInvalidChainNames() { ReferenceParam param = null; @@ -1678,46 +1661,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test { } - @Test - public void testReadInvalidVersion() throws Exception { - String methodName = "testReadInvalidVersion"; - - Patient pat = new Patient(); - 
pat.addIdentifier().setSystem("urn:system").setValue(methodName); - IIdType id = myPatientDao.create(pat, mySrd).getId(); - - assertEquals(methodName, myPatientDao.read(id, mySrd).getIdentifier().get(0).getValue()); - - try { - myPatientDao.read(id.withVersion("0"), mySrd); - fail(""); - } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(979) + "Version \"0\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage()); - } - - try { - myPatientDao.read(id.withVersion("2"), mySrd); - fail(""); - } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(979) + "Version \"2\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage()); - } - - try { - myPatientDao.read(id.withVersion("H"), mySrd); - fail(""); - } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(978) + "Version \"H\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage()); - } - - try { - myPatientDao.read(new IdDt("Patient/9999999999999/_history/1"), mySrd); - fail(""); - } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage()); - } - - } - @Test public void testReadWithDeletedResource() { String methodName = "testReadWithDeletedResource"; diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java index 678da5fbfda..060e1430c5e 100644 --- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java +++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java @@ -2574,25 +2574,25 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test { @Test public void testUpdateRejectsInvalidTypes() { - Patient p1 = new Patient(); - p1.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes"); - p1.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes"); - IdDt p1id = (IdDt) myClient.create().resource(p1).execute().getId(); + Patient patient = new Patient(); + patient.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes"); + patient.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes"); + IdDt p1id = (IdDt) myClient.create().resource(patient).execute().getId(); - Organization p2 = new Organization(); - p2.getNameElement().setValue("testUpdateRejectsInvalidTypes"); + Organization org = new Organization(); + org.getNameElement().setValue("testUpdateRejectsInvalidTypes"); try { - myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute(); + myClient.update().resource(org).withId("Organization/" + p1id.getIdPart()).execute(); fail(""); } catch (UnprocessableEntityException e) { - // good + assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]"); } try { - myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute(); + myClient.update().resource(org).withId("Patient/" + p1id.getIdPart()).execute(); fail(""); } catch (UnprocessableEntityException e) { - // good + assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]"); } } diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml 
b/hapi-fhir-jpaserver-test-dstu3/pom.xml index bd243d07689..99000fbb047 100644 --- a/hapi-fhir-jpaserver-test-dstu3/pom.xml +++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java index f193c55af7c..065afeeb639 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3Test.java @@ -798,57 +798,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test { } - @Test - public void testCreateWithIllegalReference() { - Observation o1 = new Observation(); - o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless(); - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(id1); - myPatientDao.create(p, mySrd); - fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage()); - } - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart())); - myPatientDao.create(p, mySrd); - fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage()); - } - - // Now with a forced ID - - o1 = new Observation(); - o1.setId("testCreateWithIllegalReference"); - o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless(); - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(id1); - myPatientDao.create(p, mySrd); - fail(""); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. 
Resource type 'Observation' is not valid for this path", e.getMessage()); - } - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart())); - myPatientDao.create(p, mySrd); - fail(""); - } catch (InvalidRequestException e) { - assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage()); - } - - } - @Test public void testCreateWithInvalid() { Observation o1 = new Observation(); @@ -2345,7 +2294,7 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test { myPatientDao.read(new IdType("Patient/9999999999999/_history/1"), mySrd); fail(""); } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage()); + assertEquals(Msg.code(2001) + "Resource Patient/9999999999999 is not known", e.getMessage()); } } diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java index 4b2e6ab1cae..7b75d488815 100644 --- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java +++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java @@ -1024,6 +1024,8 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv ValueSet updatedValueSet_v1 = valueSet_v1; updatedValueSet_v1.setName(valueSet_v1.getName().concat(" - MODIFIED")); + logAllValueSets(); + String url = myClient.getServerBase().concat("/").concat(myExtensionalVsId_v1.getValueAsString()); Bundle bundle = new Bundle(); bundle.setType(Bundle.BundleType.TRANSACTION); @@ -1056,7 +1058,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv .getRequest() .setMethod(Bundle.HTTPVerb.PUT) .setUrl("ValueSet/" + updatedValueSet_v2.getIdElement().getIdPart()); - ourLog.debug("Transaction Bundle:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); + ourLog.info("Transaction Bundle:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle)); myClient.transaction().withBundle(bundle).execute(); updatedValueSet_v2 = myValueSetDao.read(myExtensionalVsId_v2); diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml index c7d036896a9..8a9353386f4 100644 --- a/hapi-fhir-jpaserver-test-r4/pom.xml +++ b/hapi-fhir-jpaserver-test-r4/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java index 02fbafee38a..c423a70915b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDaoTest.java @@ -71,6 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; +import static 
org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.isNotNull; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.lenient; @@ -136,7 +137,7 @@ class BaseHapiFhirResourceDaoTest { private ArgumentCaptor mySearchParameterMapCaptor; // we won't inject this - private FhirContext myFhirContext = FhirContext.forR4Cached(); + private final FhirContext myFhirContext = FhirContext.forR4Cached(); @InjectMocks private TestResourceDao mySvc; @@ -236,16 +237,17 @@ class BaseHapiFhirResourceDaoTest { // mock when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead( any(RequestDetails.class), - Mockito.anyString(), + anyString(), any(IIdType.class) )).thenReturn(partitionId); - when(myIdHelperService.resolveResourcePersistentIds( + when(myIdHelperService.resolveResourceIdentityPid( any(RequestPartitionId.class), - Mockito.anyString(), - Mockito.anyString() + anyString(), + anyString(), + any() )).thenReturn(jpaPid); when(myEntityManager.find( - any(Class.class), + any(), anyLong() )).thenReturn(entity); // we don't stub myConfig.getResourceClientIdStrategy() diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java deleted file mode 100644 index fa0870d4e67..00000000000 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/index/IdHelperServiceTest.java +++ /dev/null @@ -1,213 +0,0 @@ -package ca.uhn.fhir.jpa.dao.index; - -import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; -import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; -import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.cross.IResourceLookup; -import ca.uhn.fhir.jpa.model.dao.JpaPid; -import ca.uhn.fhir.jpa.util.MemoryCacheService; -import org.hl7.fhir.r4.model.Patient; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.stream.Collectors; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyBoolean; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -public class IdHelperServiceTest { - - @Mock - private JpaStorageSettings myStorageSettings; - - @Mock - private IResourceTableDao myResourceTableDao; - - @Mock - private MemoryCacheService myMemoryCacheService; - - @Mock - private PartitionSettings myPartitionSettings; - - @InjectMocks - private IdHelperService myHelperService; - - @BeforeEach - public void beforeEach() { - myHelperService.setDontCheckActiveTransactionForUnitTest(true); - } - - @Test - public void resolveResourcePersistentIds_withValidPids_returnsMap() { - RequestPartitionId partitionId = RequestPartitionId.allPartitions(); - String resourceType = Patient.class.getSimpleName(); - List patientIdsToResolve = new 
ArrayList<>(); - patientIdsToResolve.add("123"); - patientIdsToResolve.add("456"); - - // test - Map idToPid = myHelperService.resolveResourcePersistentIds(partitionId, - resourceType, - patientIdsToResolve); - - assertFalse(idToPid.isEmpty()); - for (String pid : patientIdsToResolve) { - assertThat(idToPid).containsKey(pid); - } - } - - @Test - public void resolveResourcePersistentIds_withForcedIdsAndDeleteEnabled_returnsMap() { - RequestPartitionId partitionId = RequestPartitionId.allPartitions(); - String resourceType = Patient.class.getSimpleName(); - List patientIdsToResolve = new ArrayList<>(); - patientIdsToResolve.add("RED"); - patientIdsToResolve.add("BLUE"); - - Object[] redView = new Object[] { - "Patient", - 123l, - "RED", - new Date(), - null, - null - }; - Object[] blueView = new Object[] { - "Patient", - 456l, - "BLUE", - new Date(), - null, - null - }; - - // when - when(myStorageSettings.isDeleteEnabled()) - .thenReturn(true); - when(myResourceTableDao.findAndResolveByForcedIdWithNoType(Mockito.anyString(), - Mockito.anyList(), Mockito.anyBoolean())) - .thenReturn(Collections.singletonList(redView)) - .thenReturn(Collections.singletonList(blueView)); - - // test - Map map = myHelperService.resolveResourcePersistentIds( - partitionId, - resourceType, - patientIdsToResolve); - - assertFalse(map.isEmpty()); - for (String id : patientIdsToResolve) { - assertThat(map).containsKey(id); - } - } - - @Test - public void resolveResourcePersistenIds_withForcedIdAndDeleteDisabled_returnsMap() { - RequestPartitionId partitionId = RequestPartitionId.allPartitions(); - String resourceType = Patient.class.getSimpleName(); - List patientIdsToResolve = new ArrayList<>(); - patientIdsToResolve.add("RED"); - patientIdsToResolve.add("BLUE"); - - JpaPid red = JpaPid.fromIdAndVersion(123L, 123L); - JpaPid blue = JpaPid.fromIdAndVersion(456L, 456L); - - // we will pretend the lookup value is in the cache - when(myMemoryCacheService.getThenPutAfterCommit(any(MemoryCacheService.CacheEnum.class), - Mockito.anyString(), - any(Function.class))) - .thenReturn(red) - .thenReturn(blue); - - // test - Map map = myHelperService.resolveResourcePersistentIds( - partitionId, - resourceType, - patientIdsToResolve - ); - - assertFalse(map.isEmpty()); - for (String id : patientIdsToResolve) { - assertThat(map).containsKey(id); - } - assertThat(map).containsEntry("RED", red); - assertThat(map).containsEntry("BLUE", blue); - } - - @Test - public void testResolveResourceIdentity_defaultFunctionality(){ - RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition"); - String resourceType = "Patient"; - String resourceForcedId = "AAA"; - - Object[] forcedIdView = new Object[6]; - forcedIdView[0] = resourceType; - forcedIdView[1] = 1L; - forcedIdView[2] = resourceForcedId; - forcedIdView[3] = null; - forcedIdView[4] = null; - forcedIdView[5] = null; - - Collection testForcedIdViews = new ArrayList<>(); - testForcedIdViews.add(forcedIdView); - when(myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartition(any(), any(), any(), anyBoolean())).thenReturn(testForcedIdViews); - - IResourceLookup result = myHelperService.resolveResourceIdentity(partitionId, resourceType, resourceForcedId); - assertEquals(forcedIdView[0], result.getResourceType()); - assertEquals(forcedIdView[1], result.getPersistentId().getId()); - assertEquals(forcedIdView[3], result.getDeleted()); - } - - @Test - public void testResolveResourcePersistentIds_mapDefaultFunctionality(){ - RequestPartitionId partitionId = 
RequestPartitionId.fromPartitionIdAndName(1, "partition"); - String resourceType = "Patient"; - List ids = Arrays.asList("A", "B", "C"); - - JpaPid resourcePersistentId1 = JpaPid.fromId(1L); - JpaPid resourcePersistentId2 = JpaPid.fromId(2L); - JpaPid resourcePersistentId3 = JpaPid.fromId(3L); - when(myMemoryCacheService.getThenPutAfterCommit(any(), any(), any())) - .thenReturn(resourcePersistentId1) - .thenReturn(resourcePersistentId2) - .thenReturn(resourcePersistentId3); - Map result = myHelperService.resolveResourcePersistentIds(partitionId, resourceType, ids) - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue())); - assertThat(result.keySet()).hasSize(3); - assertEquals(1L, result.get("A").getId()); - assertEquals(2L, result.get("B").getId()); - assertEquals(3L, result.get("C").getId()); - } - - @Test - public void testResolveResourcePersistentIds_resourcePidDefaultFunctionality(){ - RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition"); - String resourceType = "Patient"; - Long id = 1L; - - JpaPid jpaPid1 = JpaPid.fromId(id); - when(myStorageSettings.getResourceClientIdStrategy()).thenReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY); - when(myMemoryCacheService.getThenPutAfterCommit(any(), any(), any())).thenReturn(jpaPid1); - JpaPid result = myHelperService.resolveResourcePersistentIds(partitionId, resourceType, id.toString()); - assertEquals(id, result.getId()); - } -} diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java index de665e28ec0..fbe9348cf00 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java @@ -14,6 +14,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; +import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.util.HapiExtensions; import com.helger.commons.lang.StackTraceHelper; @@ -52,6 +53,9 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest { private boolean myHaveDroppedForcedIdUniqueConstraint; @Autowired private IPartitionLookupSvc myPartitionConfigSvc; + @Autowired + private SearchParamValidatingInterceptor mySearchParamValidatingInterceptor; + private boolean myRegisteredSearchParamValidatingInterceptor; @AfterEach public void after() { @@ -68,6 +72,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest { myStorageSettings.setAutoCreatePlaceholderReferenceTargets(new JpaStorageSettings().isAutoCreatePlaceholderReferenceTargets()); myStorageSettings.setMassIngestionMode(new JpaStorageSettings().isMassIngestionMode()); myStorageSettings.setMatchUrlCacheEnabled(new JpaStorageSettings().getMatchUrlCache()); + + if (myRegisteredSearchParamValidatingInterceptor) { + myInterceptorRegistry.unregisterInterceptor(mySearchParamValidatingInterceptor); + } } protected void assertNoRemainingPartitionIds() { @@ -112,6 +120,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest { myPartitionConfigSvc.getPartitionById(i); } + if 
(myInterceptorRegistry.getAllRegisteredInterceptors().stream().noneMatch(t->t instanceof SearchParamValidatingInterceptor)) { + myRegisteredSearchParamValidatingInterceptor = true; + myInterceptorRegistry.registerInterceptor(mySearchParamValidatingInterceptor); + } } protected void registerPartitionInterceptor() { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java index 9d84c08a78e..ef1b2714743 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java @@ -8,9 +8,11 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException; +import jakarta.persistence.Id; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.IdType; @@ -36,6 +38,24 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test { myStorageSettings.setDeleteEnabled(new JpaStorageSettings().isDeleteEnabled()); } + @Test + public void testPreventUnDeleteIfDeletesDisabled() { + // Setup + createPatient(withId("A"), withActiveTrue()); + myPatientDao.delete(new IdType("Patient/A"), mySrd); + + // Test + myStorageSettings.setDeleteEnabled(false); + try { + createPatient(withId("A"), withActiveFalse()); + fail(); + } catch (InvalidRequestException e) { + // Verify + assertThat(e.getMessage()).contains("HAPI-2573: Unable to restore previously deleted resource as deletes are disabled"); + } + } + + @Test public void testDeleteMarksResourceAndVersionAsDeleted() { diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java index 4c9464fa4d1..635e847d5dc 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4QueryCountTest.java @@ -7,8 +7,8 @@ import ca.uhn.fhir.batch2.api.VoidModel; import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson; import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep; -import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexStepV1; import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters; +import ca.uhn.fhir.batch2.jobs.reindex.v2.ReindexStepV2; import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.WorkChunk; import ca.uhn.fhir.context.FhirContext; @@ -40,6 +40,7 @@ import ca.uhn.fhir.rest.api.SortSpec; import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; import ca.uhn.fhir.rest.param.ReferenceParam; +import ca.uhn.fhir.rest.param.TokenOrListParam; import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.server.SimpleBundleProvider; import 
ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; @@ -51,6 +52,7 @@ import ca.uhn.fhir.test.utilities.server.HashMapResourceProviderExtension; import ca.uhn.fhir.test.utilities.server.RestfulServerExtension; import ca.uhn.fhir.util.BundleBuilder; import jakarta.annotation.Nonnull; +import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.BooleanType; @@ -100,6 +102,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -149,7 +152,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test @Autowired private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc; @Autowired - private ReindexStepV1 myReindexStepV1; + private ReindexStepV2 myReindexStep; @Autowired private DeleteExpungeStep myDeleteExpungeStep; @Autowired @@ -248,6 +251,70 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test } + @Test + public void testTransactionWithManyResourceLinks() { + // Setup + List patientIds = new ArrayList<>(); + List orgIds = new ArrayList<>(); + List coverageIds = new ArrayList<>(); + AtomicInteger counter = new AtomicInteger(0); + for (int i = 0; i < 10; i++) { + // Server-assigned numeric ID + coverageIds.add(createResource("Coverage", withStatus("active"))); + // Client-assigned string ID + patientIds.add(createPatient(withId(UUID.randomUUID().toString()), withActiveTrue())); + orgIds.add(createOrganization(withId(UUID.randomUUID().toString()), withName("FOO"))); + } + + Supplier creator = () -> { + int nextCount = counter.getAndIncrement(); + assert nextCount < coverageIds.size(); + + ExplanationOfBenefit retVal = new ExplanationOfBenefit(); + retVal.getMeta().addTag().setSystem("http://foo").setCode(Integer.toString(nextCount % 3)); + retVal.setId(UUID.randomUUID().toString()); + retVal.setPatient(new Reference(patientIds.get(nextCount))); + retVal.setInsurer(new Reference(orgIds.get(nextCount))); + retVal.addInsurance().setCoverage(new Reference(coverageIds.get(nextCount))); + return retVal; + }; + + Supplier transactionCreator = () -> { + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionUpdateEntry(creator.get()); + bb.addTransactionUpdateEntry(creator.get()); + bb.addTransactionUpdateEntry(creator.get()); + bb.addTransactionUpdateEntry(creator.get()); + bb.addTransactionUpdateEntry(creator.get()); + return bb.getBundleTyped(); + }; + + // Test + myCaptureQueriesListener.clear(); + mySystemDao.transaction(mySrd, transactionCreator.get()); + myCaptureQueriesListener.logInsertQueries(); + myCaptureQueriesListener.logSelectQueries(); + + // Verify + assertEquals(0, myCaptureQueriesListener.countInsertQueriesRepeated()); + assertEquals(33, myCaptureQueriesListener.countInsertQueries()); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); + assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); + + // Test again + myCaptureQueriesListener.clear(); + mySystemDao.transaction(mySrd, transactionCreator.get()); + + // Verify again + assertEquals(0, myCaptureQueriesListener.countInsertQueriesRepeated()); + assertEquals(30, myCaptureQueriesListener.countInsertQueries()); + assertEquals(1, 
myCaptureQueriesListener.countSelectQueries()); + assertEquals(0, myCaptureQueriesListener.countUpdateQueries()); + assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); + } + + /* * See the class javadoc before changing the counts in this test! */ @@ -331,7 +398,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); group = updateGroup(group, patientList.subList(initialPatientsCount, allPatientsCount)); - assertQueryCount(10, 1, 2, 0); + assertQueryCount(9, 1, 2, 0); assertThat(group.getMember()).hasSize(allPatientsCount); @@ -356,7 +423,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test group = updateGroup(group, Collections.emptyList()); myCaptureQueriesListener.logSelectQueries(); - assertQueryCount(5, 1, 2, 0); + assertQueryCount(4, 1, 2, 0); assertThat(group.getMember()).hasSize(30); @@ -938,7 +1005,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test patient.setActive(true); myCaptureQueriesListener.clear(); - myPatientDao.update(patient); + myPatientDao.update(patient, mySrd); /* * Add a resource with a forced ID target link @@ -947,7 +1014,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); Observation observation = new Observation(); observation.getSubject().setReference("Patient/P"); - myObservationDao.create(observation); + myObservationDao.create(observation, mySrd); myCaptureQueriesListener.logAllQueriesForCurrentThread(); // select: lookup forced ID assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); @@ -998,14 +1065,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test Observation observation = new Observation(); observation.getSubject().setReference("Patient/P"); myObservationDao.create(observation); - myCaptureQueriesListener.logAllQueriesForCurrentThread(); - // select: lookup forced ID - assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); - assertNoPartitionSelectors(); + myCaptureQueriesListener.logSelectQueries(); + assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); // insert to: HFJ_RESOURCE, HFJ_RES_VER, HFJ_RES_LINK assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); + assertNoPartitionSelectors(); /* * Add another @@ -1064,7 +1130,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test instance, mock(WorkChunk.class) ); - RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class)); + RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class)); // validate assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount); @@ -1109,7 +1175,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test instance, mock(WorkChunk.class) ); - RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class)); + RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class)); assertEquals(20, outcome.getRecordsProcessed()); // validate @@ -1343,6 +1409,57 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test return ids; } + @Test + public void 
testSearchByMultipleIds() { + // Setup + List idValues = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + // Client assigned and server assigned IDs + idValues.add(createPatient(withId(UUID.randomUUID().toString()), withActiveTrue()).toUnqualifiedVersionless().getValue()); + idValues.add(createPatient(withActiveTrue()).toUnqualifiedVersionless().getValue()); + } + String[] idValueArray = idValues.toArray(new String[0]); + + // Test + SearchParameterMap map = SearchParameterMap.newSynchronous(); + map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray)); + myCaptureQueriesListener.clear(); + IBundleProvider outcome = myPatientDao.search(map, mySrd); + List values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + assertThat(values).asList().containsExactlyInAnyOrder(idValueArray); + + // Now invalidate the caches, should add one more query + myMemoryCacheService.invalidateAllCaches(); + map = SearchParameterMap.newSynchronous(); + map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray)); + myCaptureQueriesListener.clear(); + outcome = myPatientDao.search(map, mySrd); + values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); + assertThat(values).asList().containsExactlyInAnyOrder(idValueArray); + + // And again, should be cached once more + map = SearchParameterMap.newSynchronous(); + map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray)); + myCaptureQueriesListener.clear(); + outcome = myPatientDao.search(map, mySrd); + values = toUnqualifiedVersionlessIdValues(outcome); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(2, myCaptureQueriesListener.countSelectQueries()); + assertThat(values).asList().containsExactlyInAnyOrder(idValueArray); + + } + + /** * See the class javadoc before changing the counts in this test! @@ -1582,12 +1699,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); IBundleProvider outcome = dao.search(map, mySrd); toUnqualifiedVersionlessIdValues(outcome); - assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + myCaptureQueriesListener.logSelectQueries(); + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); myCaptureQueriesListener.clear(); outcome = dao.search(map, mySrd); toUnqualifiedVersionlessIdValues(outcome); - assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + assertEquals(3, myCaptureQueriesListener.countSelectQueries()); } @@ -1869,6 +1987,62 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test runInTransaction(() -> assertEquals(2, myResourceTableDao.count())); } + /** + * Make sure that even if we're using versioned references, we still take + * advantage of query pre-fetching. 
+ */ + @Test + public void testTransactionPreFetchFullyQualifiedVersionedIds() { + // Setup + myStorageSettings.getTreatBaseUrlsAsLocal().add("http://localhost"); + myFhirContext.getParserOptions().setStripVersionsFromReferences(false); + + createPatient(withId("P0"), withActiveTrue()); + createPatient(withId("P1"), withActiveTrue()); + createEncounter(withId("E0"), withSubject("Patient/P0"), withStatus("planned")); + createObservation(withId("O0"), withSubject("Patient/P0")); + + Bundle input = new Bundle(); + input.setType(Bundle.BundleType.TRANSACTION); + + Patient patient = new Patient(); + patient.setId("Patient/P1"); + patient.setActive(false); + input.addEntry() + .setResource(patient) + .setFullUrl("http://localhost/Patient/P1/_history/1") + .getRequest() + .setUrl("Patient/P1/_history/1") + .setMethod(Bundle.HTTPVerb.PUT); + + Observation observation = new Observation(); + observation.setId("Observation/O0"); + observation.setSubject(new Reference("http://localhost/Patient/P0/_history/1")); + observation.setEncounter(new Reference("http://localhost/Encounter/E0/_history/1")); + input.addEntry() + .setResource(observation) + .setFullUrl("http://localhost/Observation/O0/_history/1") + .getRequest() + .setUrl("Observation/O0/_history/1") + .setMethod(Bundle.HTTPVerb.PUT); + + myCaptureQueriesListener.clear(); + mySystemDao.transaction(mySrd, input); + + // Verify + myCaptureQueriesListener.logSelectQueries(); + assertEquals(4, myCaptureQueriesListener.countSelectQueries()); + assertEquals(5, myCaptureQueriesListener.countUpdateQueries()); + assertEquals(3, myCaptureQueriesListener.countInsertQueries()); + assertEquals(0, myCaptureQueriesListener.countDeleteQueries()); + assertEquals(1, myCaptureQueriesListener.countCommits()); + + observation = myObservationDao.read(new IdType("Observation/O0"), mySrd); + ourLog.info("Observation:{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(observation)); + assertEquals("Patient/P0/_history/1", observation.getSubject().getReference()); + assertEquals("Encounter/E0/_history/1", observation.getEncounter().getReference()); + } + /** * See the class javadoc before changing the counts in this test! 
*/ @@ -2061,9 +2235,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(mySrd, input); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), "'6445233466262474106'")); - assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false), "'LOC'")); assertEquals(6, runInTransaction(() -> myResourceTableDao.count())); // Second identical pass @@ -2114,7 +2287,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(mySrd, input); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, runInTransaction(() -> myResourceTableDao.count())); // Second identical pass @@ -2172,7 +2345,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(mySrd, input); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, runInTransaction(() -> myResourceTableDao.count())); // Second identical pass @@ -2227,7 +2400,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); mySystemDao.transaction(mySrd, input); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, runInTransaction(() -> myResourceTableDao.count())); // Second identical pass @@ -2649,7 +2822,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test // Lookup the two existing IDs to make sure they are legit myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -2677,7 +2850,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test // Lookup the two existing IDs to make sure they are legit myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -2721,7 +2894,7 @@ public class FhirResourceDaoR4QueryCountTest extends 
BaseResourceProviderR4Test // Lookup the two existing IDs to make sure they are legit myCaptureQueriesListener.logInsertQueriesForCurrentThread(); myCaptureQueriesListener.logUpdateQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -2749,7 +2922,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test // Lookup the two existing IDs to make sure they are legit myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -2795,7 +2968,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test // Lookup the two existing IDs to make sure they are legit myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); @@ -3068,8 +3241,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test Bundle input = loadResource(myFhirContext, Bundle.class, "/r4/test-patient-bundle.json"); myCaptureQueriesListener.clear(); - Bundle output = mySystemDao.transaction(mySrd, input); - myCaptureQueriesListener.logSelectQueries(); + Bundle output; + try { + output = mySystemDao.transaction(mySrd, input); + } finally { + myCaptureQueriesListener.logSelectQueries(); + myCaptureQueriesListener.logInsertQueries(); + } assertEquals(17, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6189, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); @@ -3113,6 +3291,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test Bundle output = mySystemDao.transaction(mySrd, input); // Verify + myCaptureQueriesListener.logUpdateQueries(); assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(6, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); @@ -3127,6 +3306,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test assertEquals(1, myResourceHistoryTableDao.count()); }); + IdType id = new IdType(output.getEntry().get(0).getResponse().getLocation()); + assertEquals("1", id.getVersionIdPart()); + id = new IdType(output.getEntry().get(1).getResponse().getLocation()); + assertEquals("2", id.getVersionIdPart()); + + Patient p = myPatientDao.read(id.toVersionless(), mySrd); + assertEquals("2", p.getIdElement().getVersionIdPart()); + assertEquals("http://system", 
p.getIdentifierFirstRep().getSystem()); + assertTrue(p.getActive()); } /** @@ -3164,7 +3352,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test mySystemDao.transaction(mySrd, inputBundle); assertEquals(21, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); - assertEquals(78, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); + myCaptureQueriesListener.logInsertQueries(); + assertEquals(7, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size()); // Now run the transaction again - It should not need too many SELECTs @@ -3201,6 +3390,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); inputBundle = myReindexTestHelper.createTransactionBundleWith20Observation(true); mySystemDao.transaction(mySrd, inputBundle); + myCaptureQueriesListener.logSelectQueries(); assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size()); assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size()); @@ -3212,7 +3402,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test /** * See the class javadoc before changing the counts in this test! */ - @SuppressWarnings("unchecked") @Test public void testTriggerSubscription_Sync() throws Exception { // Setup @@ -3505,6 +3694,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test myCaptureQueriesListener.clear(); Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get()); + myCaptureQueriesListener.logSelectQueries(); assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); myCaptureQueriesListener.logInsertQueries(); assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ReferentialIntegrityTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ReferentialIntegrityTest.java index 8020f0556ea..5f10635772a 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ReferentialIntegrityTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ReferentialIntegrityTest.java @@ -97,9 +97,9 @@ public class FhirResourceDaoR4ReferentialIntegrityTest extends BaseJpaR4Test { // When myPatientDao.create(p); fail(); - } catch (UnprocessableEntityException e) { + } catch (InvalidRequestException e) { // Then: identify that it is the wrong resource type, since ref integrity is enabled - assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + obsId.getIdPart(), e.getMessage()); + assertEquals(Msg.code(1094) + "Resource Organization/" + obsId.getIdPart() + " not found, specified in path: Patient.managingOrganization", e.getMessage()); } // Given: now disable referential integrity on write diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchIncludeTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchIncludeTest.java index 0e558751e7c..023cbc336b1 100644 --- 
a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchIncludeTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchIncludeTest.java @@ -177,7 +177,7 @@ public class FhirResourceDaoR4SearchIncludeTest extends BaseJpaR4Test { } if (!theReverse && theMatchAll) { - SqlQuery searchForCanonicalReferencesQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(3); + SqlQuery searchForCanonicalReferencesQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2); // Make sure we have the right query - If this ever fails, maybe we have optimized the queries // (or somehow made things worse) and the search for the canonical target is no longer the 4th // SQL query diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java index 2331b0f8a41..9cec742dca9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java @@ -1731,65 +1731,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test { } - @Test - public void testSearchByIdParam_QueryIsMinimal() { - // With only an _id parameter - { - SearchParameterMap params = new SearchParameterMap(); - params.setLoadSynchronous(true); - params.add(PARAM_ID, new StringParam("DiagnosticReport/123")); - myCaptureQueriesListener.clear(); - myDiagnosticReportDao.search(params).size(); - List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertThat(selectQueries).hasSize(1); - - String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase(); - ourLog.info("SQL Query:\n{}", sqlQuery); - assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(0); - assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport'")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "res_deleted_at is null")).as(sqlQuery).isEqualTo(1); - } - // With an _id parameter and a standard search param - { - SearchParameterMap params = new SearchParameterMap(); - params.setLoadSynchronous(true); - params.add(PARAM_ID, new StringParam("DiagnosticReport/123")); - params.add("code", new TokenParam("foo", "bar")); - myCaptureQueriesListener.clear(); - myDiagnosticReportDao.search(params).size(); - List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertThat(selectQueries).hasSize(1); - - String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase(); - ourLog.info("SQL Query:\n{}", sqlQuery); - assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "hash_sys_and_value")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport")).as(sqlQuery).isEqualTo(0); // could be 0 - assertThat(countMatches(sqlQuery, "res_deleted_at")).as(sqlQuery).isEqualTo(0); // could be 0 - } - } - - @Test - public void testSearchByIdParamAndOtherSearchParam_QueryIsMinimal() { - SearchParameterMap params = new SearchParameterMap(); - params.setLoadSynchronous(true); - params.add(PARAM_ID, new 
StringParam("DiagnosticReport/123")); - params.add(PARAM_ID, new StringParam("DiagnosticReport/123")); - myCaptureQueriesListener.clear(); - myDiagnosticReportDao.search(params).size(); - List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertThat(selectQueries).hasSize(1); - - String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase(); - ourLog.info("SQL Query:\n{}", sqlQuery); - assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(0); - assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport'")).as(sqlQuery).isEqualTo(1); - assertThat(countMatches(sqlQuery, "res_deleted_at is null")).as(sqlQuery).isEqualTo(1); - } - @Test public void testSearchByIdParamAnd() { IIdType id1; diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java index e03d3725746..460d755f7e9 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchOptimizedTest.java @@ -95,7 +95,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { mySearchCoordinatorSvcImpl = ProxyUtil.getSingletonTarget(mySearchCoordinatorSvc, SearchCoordinatorSvcImpl.class); mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(null); mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(QueryParameterUtils.DEFAULT_SYNC_SIZE); - myCaptureQueriesListener.setCaptureQueryStackTrace(true); +// myCaptureQueriesListener.setCaptureQueryStackTrace(true); myStorageSettings.setAdvancedHSearchIndexing(false); } @@ -876,8 +876,11 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { String obs2id = myObservationDao.create(obs2).getId().getIdPart(); assertThat(obs2id).matches("^[0-9]+$"); - // Search by ID where all IDs are forced IDs + + // Search by ID where all IDs are forced IDs, and in two separate params { + myMemoryCacheService.invalidateAllCaches(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); map.add("_id", new TokenParam("A")); map.add("subject", new ReferenceParam("Patient/B")); @@ -885,20 +888,20 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { myCaptureQueriesListener.clear(); IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails()); assertThat(outcome.getResources(0, 999)).hasSize(1); + myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.res_type='observation'")).as(selectQuery).isEqualTo(1); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.fhir_id in ('a')")).as(selectQuery).isEqualTo(1); - + assertThat(selectQuery).contains("where rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B'"); selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select t1.res_id from hfj_resource t1")).as(selectQuery).isEqualTo(1); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_type='observation'")).as(selectQuery).isEqualTo(0); - 
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_deleted_at is null")).as(selectQuery).isEqualTo(0); + assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')"); + assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + } // Search by ID where at least one ID is a numeric ID { + myMemoryCacheService.invalidateAllCaches(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); map.add("_id", new TokenOrListParam(null, "A", obs2id)); myCaptureQueriesListener.clear(); @@ -906,20 +909,19 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { assertEquals(2, outcome.size()); assertThat(outcome.getResources(0, 999)).hasSize(2); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0")).as(selectQuery).isEqualTo(1); - // Because we included a non-forced ID, we need to verify the type - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'")).as(selectQuery).isEqualTo(1); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null")).as(selectQuery).isEqualTo(1); + String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); + assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_ID='" + obs2id + "')"); + assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); } - // Delete the resource - There should only be one search performed because deleted resources will - // be filtered out in the query that resolves forced ids to persistent ids + // Delete the resource myObservationDao.delete(new IdType("Observation/A")); myObservationDao.delete(new IdType("Observation/" + obs2id)); // Search by ID where all IDs are forced IDs { + myMemoryCacheService.invalidateAllCaches(); + SearchParameterMap map = SearchParameterMap.newSynchronous(); map.add("_id", new TokenParam("A")); myCaptureQueriesListener.clear(); @@ -928,26 +930,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { assertThat(outcome.getResources(0, 999)).isEmpty(); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(1); String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.res_type='observation'")).as(selectQuery).isEqualTo(1); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.fhir_id in ('a')")).as(selectQuery).isEqualTo(1); - } - - // Search by ID where at least one ID is a numeric ID - { - SearchParameterMap map = SearchParameterMap.newSynchronous(); - map.add("_id", new TokenOrListParam(null, "A", obs2id)); - myCaptureQueriesListener.clear(); - IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails()); - assertEquals(0, outcome.size()); - assertThat(outcome.getResources(0, 999)).isEmpty(); - myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select 
t0.res_id from hfj_resource t0")).as(selectQuery).isEqualTo(1); - // Because we included a non-forced ID, we need to verify the type - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'")).as(selectQuery).isEqualTo(1); - assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null")).as(selectQuery).isEqualTo(1); + assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')"); + assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); } } @@ -1248,7 +1233,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test { myPatientDao.update(p).getId().toUnqualifiedVersionless(); myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index c220b635433..82c8ae17cb7 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -5,6 +5,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum; import ca.uhn.fhir.jpa.api.pid.StreamTemplate; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; @@ -1090,16 +1091,28 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { runInTransaction(() -> { Patient p = new Patient(); - p.setId(id); + p.setId(id.getIdPart()); p.setActive(true); p.addName().setFamily("FAMILY"); - myPatientDao.update(p); + IIdType update1Id = myPatientDao.update(p).getId(); + assertEquals("2", update1Id.getVersionIdPart()); p = new Patient(); - p.setId(id); + p.setId(id.getIdPart()); p.setActive(false); p.addName().setFamily("FAMILY2"); - myPatientDao.update(p); + IIdType update2Id = myPatientDao.update(p).getId(); + assertEquals("3", update2Id.getVersionIdPart()); + assertEquals(update1Id.getIdPart(), update2Id.getIdPart()); + }); + + runInTransaction(()->{ + List resourceTables = myResourceTableDao.findAll(); + assertEquals(1, resourceTables.size()); + assertEquals(2, resourceTables.get(0).getVersion()); + + List versions = myResourceHistoryTableDao.findAllVersionsForResourceIdInOrder(resourceTables.get(0).getResourceId()); + assertThat(versions.stream().map(ResourceHistoryTable::getVersion).toList()).asList().containsExactly(1L, 2L, 3L); }); assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous("name", new StringParam("family2"))).size()); @@ -1193,62 +1206,29 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { } - @Test - public void testCreateWithIllegalReference() { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + public void testCreateWithIllegalReference_InvalidTypeForElement(boolean theClientAssignedId) 
{ + Observation o1 = new Observation(); o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless(); + IIdType id1; + if (theClientAssignedId) { + o1.setId("testCreateWithIllegalReference"); + id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless(); + } else { + id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless(); + } try { Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(id1); + p.getManagingOrganization().setReference(id1.getValue()); myPatientDao.create(p, mySrd); - fail(); + fail(""); } catch (UnprocessableEntityException e) { assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage()); } - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart())); - myPatientDao.create(p, mySrd); - fail(); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage()); - } - - // Now with a forced ID - - o1 = new Observation(); - o1.setId("testCreateWithIllegalReference"); - o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01"); - id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless(); - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(id1); - myPatientDao.create(p, mySrd); - fail(); - } catch (UnprocessableEntityException e) { - assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage()); - } - - try { - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart())); - myPatientDao.create(p, mySrd); - fail(); - } catch (InvalidRequestException e) { - assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage()); - } - - // Disable validation - myStorageSettings.setEnforceReferenceTargetTypes(false); - Patient p = new Patient(); - p.getManagingOrganization().setReferenceElement(id1); - myPatientDao.create(p, mySrd); - - } @Test @@ -2803,7 +2783,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { myPatientDao.read(new IdType("Patient/9999999999999/_history/1"), mySrd); fail(); } catch (ResourceNotFoundException e) { - assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage()); + assertEquals(Msg.code(2001) + "Resource Patient/9999999999999 is not known", e.getMessage()); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java index 65eb5737e2b..7cd70ab49e6 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java @@ -32,6 +32,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.OperationOutcomeUtil; import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.validation.IValidatorModule; +import 
ca.uhn.test.util.LogbackTestExtension; import ca.uhn.test.util.LogbackTestExtensionAssert; import ch.qos.logback.classic.Level; import org.apache.commons.io.IOUtils; @@ -85,6 +86,7 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.springframework.beans.factory.annotation.Autowired; @@ -113,6 +115,10 @@ import static org.mockito.Mockito.when; public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4ValidateTest.class); + + @RegisterExtension + public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(); + @Autowired private IValidatorModule myValidatorModule; @Autowired diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java index 972c7864d4d..737c764724b 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java @@ -169,9 +169,10 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { List selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); assertEquals(1, selectQueries.size()); // Look up the referenced subject/patient - String sql = selectQueries.get(0).getSql(true, false).toLowerCase(); - assertThat(sql).contains(" from hfj_resource "); - assertEquals(2, StringUtils.countMatches(selectQueries.get(0).getSql(true, false).toLowerCase(), "partition")); + String searchSql = selectQueries.get(0).getSql(true, false); + assertThat(searchSql).contains(" from HFJ_RESOURCE "); + assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql); + assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql); runInTransaction(() -> { List resLinks = myResourceLinkDao.findAll(); @@ -180,7 +181,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(obsId.getIdPartAsLong(), resLinks.get(0).getSourceResourcePid()); assertEquals(patientId.getIdPartAsLong(), resLinks.get(0).getTargetResourcePid()); assertEquals(myPartitionId, resLinks.get(0).getTargetResourcePartitionId().getPartitionId()); - assertLocalDateFromDbMatches(myPartitionDate, resLinks.get(0).getTargetResourcePartitionId().getPartitionDate()); }); } @@ -466,7 +466,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(myPartitionId, resourceLinks.get(0).getPartitionId().getPartitionId().intValue()); assertLocalDateFromDbMatches(myPartitionDate, resourceLinks.get(0).getPartitionId().getPartitionDate()); assertEquals(myPartitionId, resourceLinks.get(0).getTargetResourcePartitionId().getPartitionId().intValue()); - assertLocalDateFromDbMatches(myPartitionDate, resourceLinks.get(0).getTargetResourcePartitionId().getPartitionDate()); // HFJ_RES_PARAM_PRESENT List presents = mySearchParamPresentDao.findAllForResource(resourceTable); @@ -876,7 +875,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { IdType gotId1 = myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless(); 
assertEquals(patientId1, gotId1); - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true); ourLog.info("Search SQL:\n{}", searchSql); // Only the read columns should be used, no criteria use partition @@ -888,7 +887,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { IdType gotId2 = myPatientDao.read(patientId2, mySrd).getIdElement().toUnqualifiedVersionless(); assertEquals(patientId2, gotId2); - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true); ourLog.info("Search SQL:\n{}", searchSql); // Only the read columns should be used, no criteria use partition @@ -925,7 +924,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless(); fail(); } catch (ResourceNotFoundException e) { - assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known"); + assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known"); } } @@ -936,7 +935,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { myPatientDao.read(patientId2, mySrd).getIdElement().toUnqualifiedVersionless(); fail(); } catch (ResourceNotFoundException e) { - assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known"); + assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known"); } } } @@ -948,6 +947,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { createPatient(withPartition(2), withActiveTrue()); IIdType patientId3 = createPatient(withPartition(3), withActiveTrue()); + logAllResources(); + // Two partitions - Found { myCaptureQueriesListener.clear(); @@ -957,22 +958,30 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(patientId1, gotId1); // Only the read columns should be used, but no selectors on partition ID - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); - assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql); + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); + assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql); + assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql); } // Two partitions including default - Found { myCaptureQueriesListener.clear(); myPartitionInterceptor.addReadPartition(RequestPartitionId.fromPartitionNames(PARTITION_1, JpaConstants.DEFAULT_PARTITION_NAME)); - IdType gotId1 = myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless(); + IdType gotId1; + try { + gotId1 = myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless(); + } finally { + myCaptureQueriesListener.logSelectQueries(); + } assertEquals(patientIdNull, gotId1); // Only the read columns should be used, but no selectors on partition ID - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); - assertEquals(1, StringUtils.countMatches(searchSql, 
"PARTITION_ID,"), searchSql); + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); + assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql); + assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql); + assertEquals(3, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql); } // Two partitions - Not Found @@ -1010,10 +1019,15 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { IdType gotId1 = myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless(); assertEquals(patientId1, gotId1); - // Only the read columns should be used, but no selectors on partition ID - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); + String resolveIdentitySql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); + assertEquals(1, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID from"), resolveIdentitySql); + assertEquals(1, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID in ("), resolveIdentitySql); + assertEquals(2, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID"), resolveIdentitySql); + + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql); + assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql); } // Two partitions including default - Found @@ -1025,7 +1039,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(patientIdNull, gotId1); // Only the read columns should be used, but no selectors on partition ID - String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true); + String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql); } @@ -1079,7 +1093,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless(); fail(); } catch (ResourceNotFoundException e) { - assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known"); + assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known"); } } } @@ -1269,6 +1283,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless(); assertEquals(patientId1, gotId1); + myCaptureQueriesListener.logSelectQueries(); String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); ourLog.info("Search SQL:\n{}", searchSql); @@ -1321,6 +1336,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless(); assertEquals(patientId1, gotId1); + myCaptureQueriesListener.logSelectQueries(); String searchSql = 
myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); ourLog.info("Search SQL:\n{}", searchSql); @@ -1380,9 +1396,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { ourLog.info("Search SQL:\n{}", searchSql); // Only the read columns should be used, no criteria use partition - assertThat(searchSql).as(searchSql).contains("PARTITION_ID IN ('1')"); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1); + assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'"); + assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(2); } // Read in null Partition @@ -1430,20 +1445,13 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { assertEquals(1, searchOutcome.size()); IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless(); assertEquals(patientId1, gotId1); + myCaptureQueriesListener.logSelectQueries(); - // First SQL resolves the forced ID + // Performs the search String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false).toUpperCase(); ourLog.info("Search SQL:\n{}", searchSql); - assertThat(searchSql).as(searchSql).contains("PARTITION_ID IN ('1')"); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1); - - // Second SQL performs the search - - searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false).toUpperCase(); - ourLog.info("Search SQL:\n{}", searchSql); + assertThat(searchSql).as(searchSql).contains("FROM HFJ_RESOURCE "); assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'"); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(3); } // Read in null Partition @@ -2096,9 +2104,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test { String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false); ourLog.info("Search SQL:\n{}", searchSql); - assertThat(searchSql).as(searchSql).contains("PARTITION_ID in ('1')"); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1); - assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1); + assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'"); + assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(2); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientCompartmentEnforcingInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientCompartmentEnforcingInterceptorTest.java index 4c5f49942a7..9f65a4f9993 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientCompartmentEnforcingInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientCompartmentEnforcingInterceptorTest.java @@ -43,20 +43,25 @@ public class PatientCompartmentEnforcingInterceptorTest extends BaseResourceProv myPartitionSettings.setDefaultPartitionId(ALTERNATE_DEFAULT_ID); } + @Override @AfterEach - public void after() { + public void after() throws Exception { + super.after(); 
myInterceptorRegistry.unregisterInterceptor(myPatientIdPartitionInterceptor); myInterceptorRegistry.unregisterInterceptor(myForceOffsetSearchModeInterceptor); myInterceptorRegistry.unregisterInterceptor(mySvc); myPartitionSettings.setPartitioningEnabled(false); - myPartitionSettings.setUnnamedPartitionMode(new PartitionSettings().isUnnamedPartitionMode()); - myPartitionSettings.setDefaultPartitionId(new PartitionSettings().getDefaultPartitionId()); + PartitionSettings defaultPartitionSettings = new PartitionSettings(); + myPartitionSettings.setUnnamedPartitionMode(defaultPartitionSettings.isUnnamedPartitionMode()); + myPartitionSettings.setDefaultPartitionId(defaultPartitionSettings.getDefaultPartitionId()); + myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartitionSettings.getAllowReferencesAcrossPartitions()); } @Test public void testUpdateResource_whenCrossingPatientCompartment_throws() { + myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); createPatientA(); createPatientB(); diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java index c2a49a3f6a5..aaaa937a3dd 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java @@ -188,9 +188,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes Patient patient = myPatientDao.read(new IdType("Patient/A"), mySrd); assertTrue(patient.getActive()); myCaptureQueriesListener.logSelectQueries(); - assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3); - assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID in (?)"); - assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("where rt1_0.PARTITION_ID=? 
and rt1_0.RES_ID=?"); + assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2); + assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID=?"); } @Test @@ -223,9 +222,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes myCaptureQueriesListener.logSelectQueriesForCurrentThread(); List selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread(); - assertEquals(4, selectQueriesForCurrentThread.size()); - assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID in (?)"); - assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).contains("PARTITION_ID="); + assertEquals(3, selectQueriesForCurrentThread.size()); + assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID=?"); } @@ -237,9 +235,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes IBundleProvider outcome = myPatientDao.search(SearchParameterMap.newSynchronous("_id", new TokenParam("A")), mySrd); assertEquals(1, outcome.size()); myCaptureQueriesListener.logSelectQueries(); - assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3); - assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID in (?)"); - assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("t0.PARTITION_ID = ?"); + assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2); + assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID = ?"); } @Test @@ -352,9 +349,9 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes IBundleProvider outcome = myOrganizationDao.history(new IdType("Organization/C"), null, null, null, mySrd); myCaptureQueriesListener.logSelectQueries(); assertEquals(2, outcome.size()); - assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3); - assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID in "); - assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("PARTITION_ID="); + assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2); + assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID=?"); + assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("PARTITION_ID in (?)"); } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4IT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4IT.java index e75e3047b82..d058bb16b90 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4IT.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4IT.java @@ -137,6 +137,7 @@ public class ConsentInterceptorResourceProviderR4IT extends BaseResourceProvider myInterceptorRegistry.unregisterInterceptor(myBinaryStorageInterceptor); } + @Test public void testSearchAndBlockSomeWithReject() { create50Observations(); @@ -960,7 +961,12 @@ public class ConsentInterceptorResourceProviderR4IT extends BaseResourceProvider String fhirType = theResource.fhirType(); IFhirResourceDao dao = 
myDaoRegistry.getResourceDao(fhirType); String currentTransactionName = TransactionSynchronizationManager.getCurrentTransactionName(); - dao.read(theResource.getIdElement()); + try { + dao.read(theResource.getIdElement()); + } catch (ResourceNotFoundException e) { + // this is expected, the resource isn't saved in the DB yet. But it shouldn't + // be an NPE, only a ResourceNotFoundException + } return ConsentOutcome.PROCEED; } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java index f9b18495041..c01fa52f1b7 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/MultitenantServerR4Test.java @@ -531,7 +531,8 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te myPatientDao.update((Patient) patientA, requestDetails); fail(); } catch (InvalidRequestException e) { - assertEquals(Msg.code(2079) + "Resource " + ((Patient) patientA).getResourceType() + "/" + ((Patient) patientA).getIdElement().getIdPart() + " is not known", e.getMessage()); + String idPart = ((Patient) patientA).getIdElement().getIdPart(); + assertThat(e.getMessage()).contains("HAPI-0960: Can not create resource with ID[" + idPart + "]"); } } diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderRevIncludeTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderRevIncludeTest.java index d531251f661..62657492c87 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderRevIncludeTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderRevIncludeTest.java @@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.util.SqlQueryList; import ca.uhn.fhir.model.api.Include; import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.util.BundleUtil; +import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Bundle; @@ -39,7 +40,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test { - class SqlCapturingInterceptor { + static class SqlCapturingInterceptor { SqlQueryList queryList = null; @Hook(Pointcut.JPA_PERFTRACE_RAW_SQL) public void trackRequest(RequestDetails theRequestDetails, SqlQueryList theQueryList) { @@ -89,7 +90,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test { Bundle bundle = myClient.search() .forResource(Patient.class) .count(200) - .where(Patient.RES_ID.exactly().codes(pid.getIdPart())) + .where(IAnyResource.RES_ID.exactly().codes(pid.getIdPart())) .revInclude(new Include("CareTeam:subject:Group").setRecurse(true)) // .revInclude(new Include("CareTeam:subject:Group")) .revInclude(new Include("Group:member:Patient")) @@ -128,7 +129,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test { } break; default: - ourLog.warn(type + " found but not expected"); + ourLog.warn("{} found but not expected", type); } if (patientFound && groupFound && careTeamFound && detectedIssueFound) { @@ -144,7 +145,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test { assertEquals(methodName, 
patient.getName().get(0).getFamily()); //Ensure that the revincludes are included in the query list of the sql trace. - //TODO GGG/KHS reduce this to something less than 6 by smarter iterating and getting the resource types earlier when needed. + //TODO GGG/KHS reduce this to something less than 5 by smarter iterating and getting the resource types earlier when needed. assertThat(sqlCapturingInterceptor.getQueryList()).hasSize(5); myInterceptorRegistry.unregisterInterceptor(sqlCapturingInterceptor); } @@ -166,7 +167,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test { // DetectedIssue?_id=123&_include=DetectedIssue:author&_revinclude=PractitionerRole:practitioner Bundle bundle = myClient.search() .forResource(DetectedIssue.class) - .where(DetectedIssue.RES_ID.exactly().codes(detectedIssueId.getIdPart())) + .where(IAnyResource.RES_ID.exactly().codes(detectedIssueId.getIdPart())) .include(new Include("DetectedIssue:author")) .revInclude(new Include("PractitionerRole:practitioner").setRecurse(true)) .returnBundle(Bundle.class) diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java index ceadc2c1709..081492fb569 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java @@ -292,11 +292,11 @@ public class GiantTransactionPerfTest { ourLog.info("Merges:\n * " + myEntityManager.myMergeCount.stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); - assertThat(myEntityManager.myPersistCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactly("ResourceTable"); + assertThat(myEntityManager.myPersistCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactly("ResourceTable", "ResourceHistoryTable"); assertThat(myEntityManager.myMergeCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactlyInAnyOrder("ResourceTable", "ResourceIndexedSearchParamToken", "ResourceIndexedSearchParamToken"); assertEquals(1, myEntityManager.myFlushCount); assertEquals(1, myResourceVersionSvc.myGetVersionMap); - assertEquals(1, myResourceHistoryTableDao.mySaveCount); + assertEquals(0, myResourceHistoryTableDao.mySaveCount); } @Test @@ -632,12 +632,12 @@ public class GiantTransactionPerfTest { @Override public FlushModeType getFlushMode() { - throw new UnsupportedOperationException(); + return FlushModeType.AUTO; } @Override public void setFlushMode(FlushModeType flushMode) { - throw new UnsupportedOperationException(); + // ignore } @Override diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/async/AsyncSubscriptionMessageSubmissionIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/async/AsyncSubscriptionMessageSubmissionIT.java index 923657ae47a..598bdcad1cc 100644 --- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/async/AsyncSubscriptionMessageSubmissionIT.java +++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/async/AsyncSubscriptionMessageSubmissionIT.java @@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionMatcherInterc import 
ca.uhn.fhir.jpa.subscription.submit.interceptor.SynchronousSubscriptionMatcherInterceptor; import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber; import ca.uhn.fhir.rest.api.server.SystemRequestDetails; +import ca.uhn.test.util.LogbackTestExtension; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; import com.fasterxml.jackson.core.JsonProcessingException; @@ -28,6 +29,7 @@ import org.hl7.fhir.r4.model.Subscription; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -48,6 +50,9 @@ import static org.junit.jupiter.api.Assertions.assertFalse; public class AsyncSubscriptionMessageSubmissionIT extends BaseSubscriptionsR4Test { private static final Logger ourLog = LoggerFactory.getLogger(AsyncSubscriptionMessageSubmissionIT.class); + @RegisterExtension + public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(AsyncResourceModifiedSubmitterSvc.class); + @SpyBean IResourceModifiedConsumer myResourceModifiedConsumer; diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml index c4d3ff5e6ec..e84136c59f7 100644 --- a/hapi-fhir-jpaserver-test-r4b/pom.xml +++ b/hapi-fhir-jpaserver-test-r4b/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml index 3744bb1ac1c..9ebac21f0b6 100644 --- a/hapi-fhir-jpaserver-test-r5/pom.xml +++ b/hapi-fhir-jpaserver-test-r5/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java index 4cf1ea982c5..63e6ab0b518 100644 --- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirSystemDaoTransactionR5Test.java @@ -2,12 +2,20 @@ package ca.uhn.fhir.jpa.dao.r5; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; +import ca.uhn.fhir.jpa.entity.TermCodeSystem; +import ca.uhn.fhir.jpa.entity.TermConceptMap; +import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; +import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; +import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.BundleBuilder; import jakarta.annotation.Nonnull; import org.hl7.fhir.r5.model.BooleanType; import org.hl7.fhir.r5.model.Bundle; +import org.hl7.fhir.r5.model.CodeSystem; import org.hl7.fhir.r5.model.CodeType; +import org.hl7.fhir.r5.model.ConceptMap; +import org.hl7.fhir.r5.model.Enumerations; import org.hl7.fhir.r5.model.IdType; import org.hl7.fhir.r5.model.Observation; import org.hl7.fhir.r5.model.Parameters; @@ -15,6 +23,7 @@ import org.hl7.fhir.r5.model.Patient; import org.hl7.fhir.r5.model.Quantity; import org.hl7.fhir.r5.model.Reference; import org.hl7.fhir.r5.model.UriType; +import org.hl7.fhir.r5.model.ValueSet; import 
org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -22,14 +31,17 @@ import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.ValueSource; import java.io.IOException; +import java.util.List; import java.util.UUID; import static org.apache.commons.lang3.StringUtils.countMatches; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.when; public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { @@ -363,21 +375,21 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { myCaptureQueriesListener.logSelectQueries(); if (theTargetAlreadyExists) { if (theResourceChanges) { - assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); } else { - assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); } } else { if (theResourceChanges) { - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); } else { - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); } @@ -488,14 +500,14 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertNull(outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); - assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/2"); + assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3"); assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus()); logAllResources(); logAllResourceVersions(); actual = myPatientDao.read(resourceId, mySrd); - assertEquals("2", actual.getIdElement().getVersionIdPart()); + assertEquals("3", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); @@ -505,11 +517,11 @@ public class FhirSystemDaoTransactionR5Test extends 
BaseJpaR5Test { ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertNull(outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); - assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3"); + assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/5"); assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus()); actual = myPatientDao.read(resourceId, mySrd); - assertEquals("3", actual.getIdElement().getVersionIdPart()); + assertEquals("5", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); } @@ -525,7 +537,6 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { } - @Test public void testConditionalDeleteAndConditionalUpdateOnSameResource_MultipleMatchesAlreadyExist() { @@ -558,7 +569,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // First pass (resource doesn't already exist) - outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext)); + outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-0")); assertNull(outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/1"); @@ -569,23 +580,26 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { assertEquals("1", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); + assertEquals("FAMILY-0", actual.getNameFirstRep().getFamily()); // Second pass (resource already exists) myCaptureQueriesListener.clear(); - outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext)); + outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-1")); myCaptureQueriesListener.logUpdateQueries(); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertNull(outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/1"); assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus()); + resourceId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()).toUnqualifiedVersionless(); + actual = myPatientDao.read(resourceId, mySrd); + assertEquals("FAMILY-1", actual.getNameFirstRep().getFamily()); logAllResources(); logAllResourceVersions(); IdType resourceId2 = new IdType(outcome.getEntry().get(1).getResponse().getLocation()).toUnqualifiedVersionless(); - assertThat(resourceId2.getIdPart()).isNotEqualTo(resourceId.getIdPart()); actual = myPatientDao.read(resourceId2, mySrd); assertEquals("1", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); @@ -593,7 +607,7 @@ public class 
FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { // Third pass (resource already exists) - outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext)); + outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-2")); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertNull(outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); @@ -606,6 +620,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { assertEquals("1", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); + assertEquals("FAMILY-2", actual.getNameFirstRep().getFamily()); } /** @@ -632,22 +647,32 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); + logAllResources(); + logAllResourceVersions(); + // Second pass (resource already exists) myCaptureQueriesListener.clear(); - outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext)); + try { + outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext)); + } finally { + myCaptureQueriesListener.logInsertQueries(); + } myCaptureQueriesListener.logUpdateQueries(); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); assertEquals("Patient/P/_history/2", outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals("Patient/P/_history/2", outcome.getEntry().get(1).getResponse().getLocation()); + assertEquals("Patient/P/_history/3", outcome.getEntry().get(1).getResponse().getLocation()); assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus()); logAllResources(); logAllResourceVersions(); - actual = myPatientDao.read(resourceId, mySrd); - assertEquals("2", actual.getIdElement().getVersionIdPart()); + assertThrows(ResourceGoneException.class, ()->myPatientDao.read(resourceId.withVersion("2"), mySrd)); + actual = myPatientDao.read(resourceId.withVersion("3"), mySrd); + assertEquals("3", actual.getIdElement().getVersionIdPart()); + actual = myPatientDao.read(resourceId.toVersionless(), mySrd); + assertEquals("3", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); @@ -655,26 +680,47 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext)); ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); - assertEquals("Patient/P/_history/3", outcome.getEntry().get(0).getResponse().getLocation()); + assertEquals("Patient/P/_history/4", outcome.getEntry().get(0).getResponse().getLocation()); assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus()); - assertEquals("Patient/P/_history/3", outcome.getEntry().get(1).getResponse().getLocation()); + 
assertEquals("Patient/P/_history/5", outcome.getEntry().get(1).getResponse().getLocation()); assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus()); - actual = myPatientDao.read(resourceId, mySrd); - assertEquals("3", actual.getIdElement().getVersionIdPart()); + actual = myPatientDao.read(resourceId.withVersion("5"), mySrd); + assertEquals("5", actual.getIdElement().getVersionIdPart()); assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem()); assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem()); } + @Test + public void testDeleteThenConditionalCreateOnTheSameResource() { + // Create a patient + myPatientDao.update(createPatientWithIdentifierAndTag().setActive(true), mySrd); + + // Transaction which deletes that patient by ID and then recreates it conditionally + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionDeleteEntry(new IdType("Patient/P")); + bb.addTransactionUpdateEntry(createPatientWithIdentifierAndTag().setActive(false)).conditional("Patient?identifier=http://foo|bar"); + Bundle outcome = mySystemDao.transaction(mySrd, bb.getBundleTyped()); + ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome)); + + assertThat(outcome.getEntry().get(0).getResponse().getLocation()).endsWith("_history/2"); + assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3"); + + // Verify + Patient patient = myPatientDao.read(new IdType("Patient/P"), mySrd); + assertFalse(patient.getActive()); + assertEquals("3", patient.getIdElement().getVersionIdPart()); + } + /** * See #5110 */ @Test public void testTransactionWithMissingSystem() { - BundleBuilder bb = new BundleBuilder(myFhirContext); + BundleBuilder bb = new BundleBuilder(myFhirContext); Patient patient = new Patient(); patient.setId(IdType.newRandomUuid()); @@ -685,13 +731,149 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { patient.addName().setText("Jane Doe"); bb.addTransactionCreateEntry(patient); - Bundle inputBundle = bb.getBundleTyped(); + Bundle inputBundle = bb.getBundleTyped(); Bundle outputBundle = mySystemDao.transaction(mySrd, inputBundle); assertThat(outputBundle.getEntry().get(0).getResponse().getLocation()).matches("Patient/[0-9]+/_history/1"); } + /** + * Make sure we can successfully handle this in a transaction, including + * creating the extra table rows for this resource type + */ + @Test + public void testCreateCodeSystem() { + CodeSystem cs = newDummyCodeSystem(); + + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionCreateEntry(cs); + + // Test + mySystemDao.transaction(mySrd, bb.getBundleTyped()); + + // Verify + runInTransaction(() -> { + List valueSets = myTermCodeSystemDao.findAll(); + assertThat(valueSets).hasSize(1); + }); + } + + /** + * Make sure we can successfully handle this in a transaction, including + * creating the extra table rows for this resource type + */ + @Test + public void testCreateConceptMap() { + ConceptMap cm = newDummyConceptMap(); + + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionCreateEntry(cm); + + // Test + mySystemDao.transaction(mySrd, bb.getBundleTyped()); + + // Verify + runInTransaction(() -> { + List valueSets = myTermConceptMapDao.findAll(); + assertThat(valueSets).hasSize(1); + }); + } + + /** + * Make sure we can successfully handle this in a transaction, including + * creating the extra table rows for this resource type + */ + @Test + public void 
testCreateValueSet() { + ValueSet vs = newDummyValueSet(); + + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionCreateEntry(vs); + + // Test + mySystemDao.transaction(mySrd, bb.getBundleTyped()); + + // Verify + runInTransaction(() -> { + List valueSets = myTermValueSetDao.findAll(); + assertThat(valueSets).hasSize(1); + }); + } + + /** + * Two resources with the same URL in a single transaction + * bundle should error gracefully + */ + @Test + public void testCreateDuplicateCodeSystem() { + + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionCreateEntry(newDummyCodeSystem()); + bb.addTransactionCreateEntry(newDummyCodeSystem()); + + // Test + try { + mySystemDao.transaction(mySrd, bb.getBundleTyped()); + fail(); + } catch (UnprocessableEntityException e) { + // Verify + assertThat(e.getMessage()).contains("Can not create multiple CodeSystem resources with CodeSystem.url"); + } + + } + + /** + * Two resources with the same URL in a single transaction + * bundle should error gracefully + */ + @Test + public void testCreateDuplicateValueSet() { + + BundleBuilder bb = new BundleBuilder(myFhirContext); + bb.addTransactionCreateEntry(newDummyValueSet()); + bb.addTransactionCreateEntry(newDummyValueSet()); + + // Test + try { + mySystemDao.transaction(mySrd, bb.getBundleTyped()); + fail(); + } catch (UnprocessableEntityException e) { + // Verify + assertThat(e.getMessage()).contains("Can not create multiple ValueSet resources with ValueSet.url"); + } + + } + + @Nonnull + private static CodeSystem newDummyCodeSystem() { + CodeSystem cs = new CodeSystem(); + cs.setUrl("http://foo"); + cs.setVersion("1.2.3"); + cs.setStatus(Enumerations.PublicationStatus.ACTIVE); + cs.addConcept().setCode("HELLO"); + return cs; + } + + @Nonnull + private static ConceptMap newDummyConceptMap() { + ConceptMap cm = new ConceptMap(); + cm.setUrl("http://foo"); + cm.setVersion("1.2.3"); + cm.setStatus(Enumerations.PublicationStatus.ACTIVE); + return cm; + } + + @Nonnull + private static ValueSet newDummyValueSet() { + ValueSet vs = new ValueSet(); + vs.setUrl("http://foo"); + vs.setVersion("1.2.3"); + vs.setStatus(Enumerations.PublicationStatus.ACTIVE); + vs.getCompose().addInclude().setSystem("http://hl7.org/fhir/administrative-gender"); + return vs; + } + @Nonnull private static Bundle createBundleWithConditionalDeleteAndConditionalUpdateOnSameResource(FhirContext theFhirContext) { @@ -705,13 +887,14 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test { } @Nonnull - private static Bundle createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(FhirContext theFhirContext) { + private static Bundle createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(FhirContext theFhirContext, String theFamilyName) { // Build a new bundle each time we need it BundleBuilder bb = new BundleBuilder(theFhirContext); bb.addTransactionDeleteConditionalEntry("Patient?identifier=http://foo|bar"); Patient patient = createPatientWithIdentifierAndTag(); - patient.setId((String)null); + patient.setId((String) null); + patient.getNameFirstRep().setFamily(theFamilyName); bb.addTransactionCreateEntry(patient).conditional("Patient?identifier=http://foo|bar"); return bb.getBundleTyped(); } diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java index 8cf00effbfc..e10ee90094b 100644 --- 
a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java +++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/UpliftedRefchainsAndChainedSortingR5Test.java @@ -247,9 +247,9 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // Verify SQL - // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference + // 2- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -291,9 +291,9 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // Verify SQL - // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference + // 1- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -337,7 +337,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -382,7 +382,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -426,7 +426,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // Verify SQL // 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference - assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written @@ -476,7 +476,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test { // Verify SQL myCaptureQueriesListener.logSelectQueriesForCurrentThread(); - assertEquals(10, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); + assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread()); // Verify correct indexes are written diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml index e5e819802dc..1ac2a59d4e2 100644 --- a/hapi-fhir-jpaserver-test-utilities/pom.xml +++ b/hapi-fhir-jpaserver-test-utilities/pom.xml @@ -6,7 +6,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java index 685d575c9d9..f379ee2cf0a 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java +++ 
b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java @@ -566,9 +566,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil @RegisterExtension private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry); - @RegisterExtension - public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(); - @AfterEach() @Order(0) public void afterCleanupDao() { diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java index de13d10b2a9..dba57255f05 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java @@ -20,7 +20,9 @@ package ca.uhn.fhir.jpa.test; import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.context.ParserOptions; import ca.uhn.fhir.context.support.IValidationSupport; +import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.interceptor.api.Pointcut; @@ -158,6 +160,7 @@ import java.util.stream.Stream; import static ca.uhn.fhir.rest.api.Constants.HEADER_CACHE_CONTROL; import static ca.uhn.fhir.util.TestUtil.doRandomizeLocaleAndTimezone; import static java.util.stream.Collectors.joining; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.awaitility.Awaitility.await; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -398,9 +401,13 @@ public abstract class BaseJpaTest extends BaseTest { JpaStorageSettings defaultConfig = new JpaStorageSettings(); myStorageSettings.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing()); myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches()); + myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled()); myStorageSettings.setIncludeHashIdentityForTokenSearches(defaultConfig.isIncludeHashIdentityForTokenSearches()); myStorageSettings.setMaximumIncludesToLoadPerPage(defaultConfig.getMaximumIncludesToLoadPerPage()); + myStorageSettings.getTreatBaseUrlsAsLocal().clear(); + ParserOptions defaultParserOptions = new ParserOptions(); + myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences()); } @AfterEach @@ -938,7 +945,10 @@ public abstract class BaseJpaTest extends BaseTest { dao.read(theId, mySrd); fail(""); } catch (ResourceNotFoundException e) { - // good + assertThat(e.getMessage()).containsAnyOf( + Msg.code(1996) + "Resource " + theId.toUnqualifiedVersionless().getValue() + " is not known", + Msg.code(2001) + "Resource " + theId.toUnqualifiedVersionless().getValue() + " is not known" + ); } } } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml index 85f3a75d898..37b62d52c3c 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml +++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-server-cds-hooks/pom.xml b/hapi-fhir-server-cds-hooks/pom.xml index 81cf14e2619..0c3871b2a8c 100644 --- a/hapi-fhir-server-cds-hooks/pom.xml +++ 
b/hapi-fhir-server-cds-hooks/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml index 6d8a5e84722..00258dc7ebd 100644 --- a/hapi-fhir-server-mdm/pom.xml +++ b/hapi-fhir-server-mdm/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java index 4cbff60f78b..bbc3db1058d 100644 --- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java +++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/svc/MdmSurvivorshipSvcImpl.java @@ -24,7 +24,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService; +import ca.uhn.fhir.jpa.model.cross.IResourceLookup; import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc; import ca.uhn.fhir.mdm.api.IMdmSurvivorshipService; import ca.uhn.fhir.mdm.api.MdmMatchResultEnum; @@ -40,6 +42,7 @@ import ca.uhn.fhir.util.TerserUtil; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseResource; +import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.data.domain.Page; import java.util.ArrayList; @@ -153,12 +156,15 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService { Page linksQuery = myMdmLinkQuerySvc.queryLinks(searchParameters, theMdmTransactionContext); // we want it ordered - List sourceIds = new ArrayList<>(); + List sourceIds = new ArrayList<>(); linksQuery.forEach(link -> { String sourceId = link.getSourceId(); - // we want only the id part, not the resource type - sourceId = sourceId.replace(resourceType + "/", ""); - sourceIds.add(sourceId); + if (!sourceId.contains("/")) { + sourceId = resourceType + "/" + sourceId; + } + IIdType id = myFhirContext.getVersion().newIdType(); + id.setValue(sourceId); + sourceIds.add(id); }); Map sourceIdToPid = new HashMap<>(); if (!sourceIds.isEmpty()) { @@ -166,15 +172,19 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService { myTransactionService .withRequest(new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions())) .execute(() -> { - Map ids = - myIIdHelperService.resolveResourcePersistentIds( - RequestPartitionId.allPartitions(), resourceType, sourceIds); - sourceIdToPid.putAll(ids); + Map> ids = myIIdHelperService.resolveResourceIdentities( + RequestPartitionId.allPartitions(), + sourceIds, + ResolveIdentityMode.includeDeleted().cacheOk()); + for (Map.Entry> entry : ids.entrySet()) { + sourceIdToPid.put( + entry.getKey().getIdPart(), entry.getValue().getPersistentId()); + } }); } return sourceIds.stream().map(id -> { - IResourcePersistentId pid = sourceIdToPid.get(id); + IResourcePersistentId pid = sourceIdToPid.get(id.getIdPart()); return dao.readByPid(pid); }); } diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml index acb71ee903b..d0671ee96a5 100644 --- a/hapi-fhir-server-openapi/pom.xml +++ b/hapi-fhir-server-openapi/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir 
hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml index c03a08d3cca..cb990bf5cfa 100644 --- a/hapi-fhir-server/pom.xml +++ b/hapi-fhir-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml index 7a5eef66ca4..acedc81e55a 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml index 1d5ba28f4c4..e7f7d5b7212 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml @@ -21,7 +21,7 @@ ca.uhn.hapi.fhir hapi-fhir-caching-api - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml index 0645fa5cde7..79741373a8a 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml @@ -7,7 +7,7 @@ hapi-fhir-serviceloaders ca.uhn.hapi.fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml index d3a37e4a403..c54fc19604a 100644 --- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml +++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml @@ -7,7 +7,7 @@ hapi-fhir ca.uhn.hapi.fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../pom.xml diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml index f525c4d3b6e..314519bf752 100644 --- a/hapi-fhir-serviceloaders/pom.xml +++ b/hapi-fhir-serviceloaders/pom.xml @@ -5,7 +5,7 @@ hapi-deployable-pom ca.uhn.hapi.fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml index 58783925838..6afde2bfd71 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml index e6a5196caeb..018576bb564 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT hapi-fhir-spring-boot-sample-client-apache diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml 
b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml index 5a54b3ebef8..59b7c769df0 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml index e18cb656477..2ed7be33b1d 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot-samples - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml index 0a76c6bb51f..02a7740e4c9 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir-spring-boot - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml index 5627eb69071..ee4b04cf6e7 100644 --- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml +++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml index 6d1740d48e9..51d5c49944f 100644 --- a/hapi-fhir-spring-boot/pom.xml +++ b/hapi-fhir-spring-boot/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml index f45656c7e70..acca99e04de 100644 --- a/hapi-fhir-sql-migrate/pom.xml +++ b/hapi-fhir-sql-migrate/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml index 1926d00165b..5421f5e2793 100644 --- a/hapi-fhir-storage-batch2-jobs/pom.xml +++ b/hapi-fhir-storage-batch2-jobs/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2-test-utilities/pom.xml b/hapi-fhir-storage-batch2-test-utilities/pom.xml index 0cb4aeb3f64..93b435fc026 100644 --- a/hapi-fhir-storage-batch2-test-utilities/pom.xml +++ b/hapi-fhir-storage-batch2-test-utilities/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml index d25b08f51c7..d662c258d6e 100644 --- a/hapi-fhir-storage-batch2/pom.xml +++ b/hapi-fhir-storage-batch2/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java index 0ecacb9bdf6..19a7397e62f 100644 --- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java +++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/chunk/ResourceIdListWorkChunkJson.java @@ -77,7 +77,8 @@ public class ResourceIdListWorkChunkJson implements IModelJson { .toString(); } - public List getResourcePersistentIds(IIdHelperService theIdHelperService) { + public > List getResourcePersistentIds( + IIdHelperService theIdHelperService) { if (myTypedPids.isEmpty()) { return Collections.emptyList(); } diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml index 2c03762d5ff..6f31b62b9ee 100644 --- a/hapi-fhir-storage-cr/pom.xml +++ b/hapi-fhir-storage-cr/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml index 85765bac654..4cf64576707 100644 --- a/hapi-fhir-storage-mdm/pom.xml +++ b/hapi-fhir-storage-mdm/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml index e3419349a6d..305ca40e896 100644 --- a/hapi-fhir-storage-test-utilities/pom.xml +++ b/hapi-fhir-storage-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml index ed507d49816..d90d02dfa75 100644 --- a/hapi-fhir-storage/pom.xml +++ b/hapi-fhir-storage/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java index 9b9e19034d0..5e3b05a6ad3 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.java @@ -1886,6 +1886,10 @@ public class JpaStorageSettings extends StorageSettings { * deletion can be skipped, which improves performance. This is particularly helpful when large * amounts of data containing client-assigned IDs are being loaded, but it can also improve * search performance. + *
<p>
+ * If deletes are disabled, it is also not possible to un-delete a previously deleted + * resource. + *
</p>
* * @since 5.0.0 */ @@ -1899,6 +1903,10 @@ public class JpaStorageSettings extends StorageSettings { * deletion can be skipped, which improves performance. This is particularly helpful when large * amounts of data containing client-assigned IDs are being loaded, but it can also improve * search performance. + *
<p>
+ * If deletes are disabled, it is also not possible to un-delete a previously deleted + * resource. + *
</p>
* * @since 5.0.0 */ diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java index d2146565698..18c1d8eef16 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/IIdHelperService.java @@ -32,6 +32,7 @@ import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; +import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Map; @@ -42,7 +43,7 @@ import java.util.Set; * This interface is used to translate between {@link IResourcePersistentId} * and actual resource IDs. */ -public interface IIdHelperService { +public interface IIdHelperService> { /** * Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs. @@ -56,15 +57,6 @@ public interface IIdHelperService { List resolveResourcePersistentIdsWithCache( @Nonnull RequestPartitionId theRequestPartitionId, List theIds, boolean theOnlyForcedIds); - /** - * Given a resource type and ID, determines the internal persistent ID for the resource. - * - * @throws ResourceNotFoundException If the ID can not be found - */ - @Nonnull - T resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId); - /** * Given a resource type and ID, determines the internal persistent ID for a resource. * Optionally filters out deleted resources. @@ -76,16 +68,7 @@ public interface IIdHelperService { @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId, - boolean theExcludeDeleted); - - /** - * Returns a mapping of Id -> IResourcePersistentId. - * If any resource is not found, it will throw ResourceNotFound exception - * (and no map will be returned) - */ - @Nonnull - Map resolveResourcePersistentIds( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds); + ResolveIdentityMode theMode); /** * Returns a mapping of Id -> IResourcePersistentId. @@ -97,7 +80,7 @@ public interface IIdHelperService { @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List theIds, - boolean theExcludeDeleted); + ResolveIdentityMode theMode); /** * Given a persistent ID, returns the associated resource ID @@ -106,30 +89,40 @@ public interface IIdHelperService { IIdType translatePidIdToForcedId(FhirContext theCtx, String theResourceType, T theId); /** - * Given a forced ID, convert it to it's Long value. Since you are allowed to use string IDs for resources, we need to - * convert those to the underlying Long values that are stored, for lookup and comparison purposes. 
- * * @throws ResourceNotFoundException If the ID can not be found */ @Nonnull - IResourceLookup resolveResourceIdentity( - @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId) + IResourceLookup resolveResourceIdentity( + @Nonnull RequestPartitionId theRequestPartitionId, + @Nullable String theResourceType, + @Nonnull String theResourceId, + @Nonnull ResolveIdentityMode theMode) throws ResourceNotFoundException; + /** + * @throws ResourceNotFoundException If the ID can not be found + */ + @Nonnull + default T resolveResourceIdentityPid( + @Nonnull RequestPartitionId theRequestPartitionId, + @Nullable String theResourceType, + @Nonnull String theResourceId, + @Nonnull ResolveIdentityMode theMode) + throws ResourceNotFoundException { + return resolveResourceIdentity(theRequestPartitionId, theResourceType, theResourceId, theMode) + .getPersistentId(); + } + /** * Given a forced ID, convert it to it's Long value. Since you are allowed to use string IDs for resources, we need to * convert those to the underlying Long values that are stored, for lookup and comparison purposes. * Optionally filters out deleted resources. * - * @throws ResourceNotFoundException If the ID can not be found + * @since 8.0.0 */ @Nonnull - IResourceLookup resolveResourceIdentity( - @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - String theResourceId, - boolean theExcludeDeleted) - throws ResourceNotFoundException; + Map> resolveResourceIdentities( + @Nonnull RequestPartitionId theRequestPartitionId, Collection theIds, ResolveIdentityMode theMode); /** * Returns true if the given resource ID should be stored in a forced ID. Under default config @@ -163,11 +156,11 @@ public interface IIdHelperService { /** * Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods */ - void addResolvedPidToForcedId( - T theResourcePersistentId, + void addResolvedPidToFhirId( + @Nonnull T theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, - String theResourceType, - @Nullable String theForcedId, + @Nonnull String theResourceType, + @Nonnull String theFhirId, @Nullable Date theDeletedAt); @Nonnull diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ResolveIdentityMode.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ResolveIdentityMode.java new file mode 100644 index 00000000000..0f1389c394b --- /dev/null +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ResolveIdentityMode.java @@ -0,0 +1,105 @@ +/*- + * #%L + * HAPI FHIR Storage api + * %% + * Copyright (C) 2014 - 2024 Smile CDR, Inc. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +package ca.uhn.fhir.jpa.api.svc; + +/** + * Resolution mode parameter for methods on {@link IIdHelperService} + */ +public class ResolveIdentityMode { + + private final boolean myIncludeDeleted; + private final boolean myUseCache; + private final boolean myFailOnDeleted; + + /** + * Non-instantiable. Use the factory methods on this class. + */ + private ResolveIdentityMode(boolean theIncludeDeleted, boolean theFailOnDeleted, boolean theUseCache) { + myIncludeDeleted = theIncludeDeleted; + myUseCache = theUseCache; + myFailOnDeleted = theFailOnDeleted; + } + + public boolean isUseCache(boolean theDeleteEnabled) { + if (myUseCache) { + return true; + } else { + return !theDeleteEnabled; + } + } + + public boolean isIncludeDeleted() { + return myIncludeDeleted; + } + + public boolean isFailOnDeleted() { + return myFailOnDeleted; + } + + /** + * Deleted resource identities can be included in the results + */ + public static Builder includeDeleted() { + return new Builder(true, false); + } + + /** + * Deleted resource identities should be excluded from the results + */ + public static Builder excludeDeleted() { + return new Builder(false, false); + } + + /** + * Throw a {@link ca.uhn.fhir.rest.server.exceptions.ResourceGoneException} if + * any of the supplied IDs corresponds to a deleted resource + */ + public static Builder failOnDeleted() { + return new Builder(false, true); + } + + public static class Builder { + + private final boolean myIncludeDeleted; + private final boolean myFailOnDeleted; + + private Builder(boolean theIncludeDeleted, boolean theFailOnDeleted) { + myIncludeDeleted = theIncludeDeleted; + myFailOnDeleted = theFailOnDeleted; + } + + /** + * Cached results are acceptable. This mode is obviously more efficient since it'll always + * try the cache first, but it should not be used in cases where it matters whether the + * deleted status has changed for a resource. + */ + public ResolveIdentityMode cacheOk() { + return new ResolveIdentityMode(myIncludeDeleted, myFailOnDeleted, true); + } + + /** + * In this mode, the cache won't be used unless deletes are disabled on this server + * (meaning that the deleted status of a resource is not able to change) + */ + public ResolveIdentityMode noCacheUnlessDeletesDisabled() { + return new ResolveIdentityMode(myIncludeDeleted, myFailOnDeleted, false); + } + } +} diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java index d4f96ae6372..fee8d73dd73 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java @@ -1133,6 +1133,7 @@ public abstract class BaseTransactionProcessor { * Loop through the request and process any entries of type * PUT, POST or DELETE */ + String previousVerb = null; for (int i = 0; i < theEntries.size(); i++) { if (i % 250 == 0) { ourLog.debug("Processed {} non-GET entries out of {} in transaction", i, theEntries.size()); @@ -1144,6 +1145,11 @@ public abstract class BaseTransactionProcessor { IIdType nextResourceId = getNextResourceIdFromBaseResource(res, nextReqEntry, theAllIds); String verb = myVersionAdapter.getEntryRequestVerb(myContext, nextReqEntry); + if (previousVerb != null && !previousVerb.equals(verb)) { + handleVerbChangeInTransactionWriteOperations(); + } + previousVerb = verb; + String resourceType = res != null ? 
myContext.getResourceType(res) : null; Integer order = theOriginalRequestOrder.get(nextReqEntry); IBase nextRespEntry = @@ -1502,6 +1508,15 @@ public abstract class BaseTransactionProcessor { } } + /** + * Subclasses may override this in order to invoke specific operations when + * we're finished handling all the write entries in the transaction bundle + * with a given verb. + */ + protected void handleVerbChangeInTransactionWriteOperations() { + // nothing + } + /** * Check for if a resource id should be matched in a conditional update * If the FHIR version is older than R4, it follows the old specifications and does not match diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java index 3c584e8aed1..87ee74ae6e2 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/index/DaoResourceLinkResolver.java @@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.api.svc.IIdHelperService; +import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; @@ -64,7 +65,7 @@ import java.util.Date; import java.util.List; import java.util.Optional; -public class DaoResourceLinkResolver implements IResourceLinkResolver { +public class DaoResourceLinkResolver> implements IResourceLinkResolver { private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DaoResourceLinkResolver.class); @Autowired @@ -124,11 +125,14 @@ public class DaoResourceLinkResolver implements String idPart = targetResourceId.getIdPart(); try { if (persistentId == null) { - resolvedResource = - myIdHelperService.resolveResourceIdentity(theRequestPartitionId, resourceType, idPart); + resolvedResource = myIdHelperService.resolveResourceIdentity( + theRequestPartitionId, + resourceType, + idPart, + ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled()); ourLog.trace("Translated {}/{} to resource PID {}", type, idPart, resolvedResource); } else { - resolvedResource = new ResourceLookupPersistentIdWrapper(persistentId); + resolvedResource = new ResourceLookupPersistentIdWrapper<>(persistentId); } } catch (ResourceNotFoundException e) { @@ -142,8 +146,23 @@ public class DaoResourceLinkResolver implements RuntimeResourceDefinition missingResourceDef = myContext.getResourceDefinition(type); String resName = missingResourceDef.getName(); - throw new InvalidRequestException(Msg.code(1094) + "Resource " + resName + "/" + idPart - + " not found, specified in path: " + sourcePath); + + // Check if this was a deleted resource + try { + resolvedResource = myIdHelperService.resolveResourceIdentity( + theRequestPartitionId, + resourceType, + idPart, + ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled()); + handleDeletedTarget(resourceType, idPart, sourcePath); + } catch (ResourceNotFoundException e2) { + resolvedResource = null; + } + + if (resolvedResource == null) { + throw new InvalidRequestException(Msg.code(1094) + "Resource " + resName + "/" + idPart + + " not found, specified in path: " + sourcePath); + } } resolvedResource = createdTableOpt.get(); } @@ -201,16 +220,20 @@ public 
class DaoResourceLinkResolver implements } if (resolvedResource.getDeleted() != null) { - if (!myStorageSettings.isEnforceReferentialIntegrityOnWrite()) { - return false; - } - String resName = resolvedResource.getResourceType(); - throw new InvalidRequestException(Msg.code(1096) + "Resource " + resName + "/" + idPart - + " is deleted, specified in path: " + sourcePath); + return handleDeletedTarget(resolvedResource.getResourceType(), idPart, sourcePath); } return true; } + private boolean handleDeletedTarget(String resType, String idPart, String sourcePath) { + if (!myStorageSettings.isEnforceReferentialIntegrityOnWrite()) { + return false; + } + String resName = resType; + throw new InvalidRequestException(Msg.code(1096) + "Resource " + resName + "/" + idPart + + " is deleted, specified in path: " + sourcePath); + } + @Nullable @Override public IBaseResource loadTargetResource( diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java index b66b6b298a2..c056c5947fe 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamWithInlineReferencesExtractor.java @@ -48,7 +48,7 @@ import java.util.List; import java.util.Optional; import java.util.Set; -public abstract class BaseSearchParamWithInlineReferencesExtractor +public abstract class BaseSearchParamWithInlineReferencesExtractor> implements ISearchParamWithInlineReferencesExtractor { private static final Logger ourLog = LoggerFactory.getLogger(BaseSearchParamWithInlineReferencesExtractor.class); diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java index 33f73f60e7a..4739b26afde 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java @@ -32,9 +32,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Queue; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -53,7 +55,7 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe public static final Predicate DEFAULT_SELECT_INCLUSION_CRITERIA = t -> t.toLowerCase(Locale.US).startsWith("select"); - private static final int CAPACITY = 1000; + private static final int CAPACITY = 10000; private static final Logger ourLog = LoggerFactory.getLogger(CircularQueueCaptureQueriesListener.class); private Queue myQueries; private AtomicInteger myCommitCounter; @@ -187,6 +189,13 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe return getQueriesStartingWith("insert"); } + /** + * Returns all INSERT queries executed on the current thread - Index 0 is oldest + */ + public List getInsertQueries(Predicate theFilter) { + return getQueriesStartingWith("insert").stream().filter(theFilter).collect(Collectors.toList()); + } + /** * Returns all UPDATE queries 
executed on the current thread - Index 0 is oldest */ @@ -352,11 +361,8 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe List insertQueries = getInsertQueries().stream() .filter(t -> theInclusionPredicate == null || theInclusionPredicate.test(t)) .collect(Collectors.toList()); - boolean inlineParams = true; - List queries = insertQueries.stream() - .map(t -> CircularQueueCaptureQueriesListener.formatQueryAsSql(t, inlineParams, inlineParams)) - .collect(Collectors.toList()); - ourLog.info("Insert Queries:\n{}", String.join("\n", queries)); + List queriesStrings = renderQueriesForLogging(true, true, insertQueries); + ourLog.info("Insert Queries:\n{}", String.join("\n", queriesStrings)); return countQueries(insertQueries); } @@ -402,6 +408,17 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe return countQueries(getInsertQueries()); } + /** + * This method returns a count of insert queries which were issued more than + * once. If a query was issued twice with batched parameters, that does not + * count as a repeated query, but if it is issued as two separate insert + * statements with a single set of parameters this counts as a repeated + * query. + */ + public int countInsertQueriesRepeated() { + return getInsertQueries(new DuplicateCheckingPredicate()).size(); + } + public int countUpdateQueries() { return countQueries(getUpdateQueries()); } @@ -467,4 +484,19 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe b.append("\n"); return b.toString(); } + + /** + * Create a new instance of this each time you use it + */ + private static class DuplicateCheckingPredicate implements Predicate { + private final Set mySeenSql = new HashSet<>(); + + @Override + public boolean test(SqlQuery theSqlQuery) { + String sql = theSqlQuery.getSql(false, false); + boolean retVal = !mySeenSql.add(sql); + ourLog.trace("SQL duplicate[{}]: {}", retVal, sql); + return retVal; + } + } } diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java index 81b0871b91b..c020f57141f 100644 --- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java +++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/util/MemoryCacheService.java @@ -19,20 +19,25 @@ */ package ca.uhn.fhir.jpa.util; +import ca.uhn.fhir.context.FhirContext; +import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; import ca.uhn.fhir.jpa.api.model.TranslationQuery; import ca.uhn.fhir.jpa.model.entity.TagTypeEnum; import ca.uhn.fhir.sl.cache.Cache; import ca.uhn.fhir.sl.cache.CacheFactory; import jakarta.annotation.Nonnull; +import jakarta.annotation.Nullable; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.hl7.fhir.instance.model.api.IIdType; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; import java.util.Collection; import java.util.EnumMap; import java.util.Map; +import java.util.Objects; import java.util.function.Function; import static java.util.concurrent.TimeUnit.MINUTES; @@ -73,7 +78,7 @@ public class MemoryCacheService { case PID_TO_FORCED_ID: case FORCED_ID_TO_PID: case MATCH_URL: - case RESOURCE_LOOKUP: + case RESOURCE_LOOKUP_BY_FORCED_ID: case HISTORY_COUNT: case TAG_DEFINITION: case 
RESOURCE_CONDITIONAL_CREATE_VERSION: @@ -130,7 +135,9 @@ public class MemoryCacheService { } public void put(CacheEnum theCache, K theKey, V theValue) { - assert theCache.getKeyType().isAssignableFrom(theKey.getClass()); + assert theCache.getKeyType().isAssignableFrom(theKey.getClass()) + : "Key type " + theKey.getClass() + " doesn't match expected " + theCache.getKeyType() + " for cache " + + theCache; doPut(theCache, theKey, theValue); } @@ -150,6 +157,9 @@ public class MemoryCacheService { * in order to avoid cache poisoning. */ public void putAfterCommit(CacheEnum theCache, K theKey, V theValue) { + assert theCache.getKeyType().isAssignableFrom(theKey.getClass()) + : "Key type " + theKey.getClass() + " doesn't match expected " + theCache.getKeyType() + " for cache " + + theCache; if (TransactionSynchronizationManager.isSynchronizationActive()) { TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() { @Override @@ -192,7 +202,11 @@ public class MemoryCacheService { public enum CacheEnum { TAG_DEFINITION(TagDefinitionCacheKey.class), - RESOURCE_LOOKUP(String.class), + /** + * Key type: {@link ForcedIdCacheKey} + * Value type: {@literal JpaResourceLookup} + */ + RESOURCE_LOOKUP_BY_FORCED_ID(ForcedIdCacheKey.class), FORCED_ID_TO_PID(String.class), FHIRPATH_EXPRESSION(String.class), /** @@ -317,4 +331,62 @@ public class MemoryCacheService { return myHashCode; } } + + public static class ForcedIdCacheKey { + + private final String myResourceType; + private final String myResourceId; + private final RequestPartitionId myRequestPartitionId; + private final int myHashCode; + + public ForcedIdCacheKey( + @Nullable String theResourceType, + @Nonnull String theResourceId, + @Nonnull RequestPartitionId theRequestPartitionId) { + myResourceType = theResourceType; + myResourceId = theResourceId; + myRequestPartitionId = theRequestPartitionId; + myHashCode = Objects.hash(myResourceType, myResourceId, myRequestPartitionId); + } + + @Override + public boolean equals(Object theO) { + if (this == theO) { + return true; + } + if (!(theO instanceof ForcedIdCacheKey)) { + return false; + } + ForcedIdCacheKey that = (ForcedIdCacheKey) theO; + return Objects.equals(myResourceType, that.myResourceType) + && Objects.equals(myResourceId, that.myResourceId) + && Objects.equals(myRequestPartitionId, that.myRequestPartitionId); + } + + @Override + public int hashCode() { + return myHashCode; + } + + /** + * Creates and returns a new unqualified versionless IIdType instance + */ + public IIdType toIdType(FhirContext theFhirCtx) { + if (myResourceType == null) { + return toIdTypeWithoutResourceType(theFhirCtx); + } + IIdType retVal = theFhirCtx.getVersion().newIdType(); + retVal.setValue(myResourceType + "/" + myResourceId); + return retVal; + } + + /** + * Creates and returns a new unqualified versionless IIdType instance + */ + public IIdType toIdTypeWithoutResourceType(FhirContext theFhirCtx) { + IIdType retVal = theFhirCtx.getVersion().newIdType(); + retVal.setValue(myResourceId); + return retVal; + } + } } diff --git a/hapi-fhir-structures-dstu2.1/pom.xml b/hapi-fhir-structures-dstu2.1/pom.xml index 5a326c0d09f..d8d55d0d193 100644 --- a/hapi-fhir-structures-dstu2.1/pom.xml +++ b/hapi-fhir-structures-dstu2.1/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu2/pom.xml b/hapi-fhir-structures-dstu2/pom.xml index 061ddd71d19..12ef01f1c22 100644 --- 
a/hapi-fhir-structures-dstu2/pom.xml +++ b/hapi-fhir-structures-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-dstu3/pom.xml b/hapi-fhir-structures-dstu3/pom.xml index 84db45384f0..eb06ff89d41 100644 --- a/hapi-fhir-structures-dstu3/pom.xml +++ b/hapi-fhir-structures-dstu3/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-hl7org-dstu2/pom.xml b/hapi-fhir-structures-hl7org-dstu2/pom.xml index dd05f6dff09..adc0100a995 100644 --- a/hapi-fhir-structures-hl7org-dstu2/pom.xml +++ b/hapi-fhir-structures-hl7org-dstu2/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4/pom.xml b/hapi-fhir-structures-r4/pom.xml index 982bdf7d713..b862ed06928 100644 --- a/hapi-fhir-structures-r4/pom.xml +++ b/hapi-fhir-structures-r4/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r4b/pom.xml b/hapi-fhir-structures-r4b/pom.xml index 27dddad0406..0ecd3520d9b 100644 --- a/hapi-fhir-structures-r4b/pom.xml +++ b/hapi-fhir-structures-r4b/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-structures-r5/pom.xml b/hapi-fhir-structures-r5/pom.xml index a4ea18cb426..fb636ac3592 100644 --- a/hapi-fhir-structures-r5/pom.xml +++ b/hapi-fhir-structures-r5/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-test-utilities/pom.xml b/hapi-fhir-test-utilities/pom.xml index 03a21a9ecbd..21ac27a7bfa 100644 --- a/hapi-fhir-test-utilities/pom.xml +++ b/hapi-fhir-test-utilities/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-testpage-overlay/pom.xml b/hapi-fhir-testpage-overlay/pom.xml index 8925162aafd..7cca45e4626 100644 --- a/hapi-fhir-testpage-overlay/pom.xml +++ b/hapi-fhir-testpage-overlay/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-fhir-validation-resources-dstu2.1/pom.xml b/hapi-fhir-validation-resources-dstu2.1/pom.xml index bb0b1aceeda..c96a4fa6d72 100644 --- a/hapi-fhir-validation-resources-dstu2.1/pom.xml +++ b/hapi-fhir-validation-resources-dstu2.1/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu2/pom.xml b/hapi-fhir-validation-resources-dstu2/pom.xml index 03956de36ff..9e1cb753cc3 100644 --- a/hapi-fhir-validation-resources-dstu2/pom.xml +++ b/hapi-fhir-validation-resources-dstu2/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-dstu3/pom.xml b/hapi-fhir-validation-resources-dstu3/pom.xml index 38ff0109abe..9aa79b6c5bd 100644 --- a/hapi-fhir-validation-resources-dstu3/pom.xml +++ b/hapi-fhir-validation-resources-dstu3/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git 
a/hapi-fhir-validation-resources-r4/pom.xml b/hapi-fhir-validation-resources-r4/pom.xml index 85ea5282fd3..65bc10b13f4 100644 --- a/hapi-fhir-validation-resources-r4/pom.xml +++ b/hapi-fhir-validation-resources-r4/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r4b/pom.xml b/hapi-fhir-validation-resources-r4b/pom.xml index 1074f2b02df..69417a1d0eb 100644 --- a/hapi-fhir-validation-resources-r4b/pom.xml +++ b/hapi-fhir-validation-resources-r4b/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation-resources-r5/pom.xml b/hapi-fhir-validation-resources-r5/pom.xml index f39e1d78f9b..e0a2ac82d76 100644 --- a/hapi-fhir-validation-resources-r5/pom.xml +++ b/hapi-fhir-validation-resources-r5/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-fhir-validation/pom.xml b/hapi-fhir-validation/pom.xml index 41b7e93b381..a4067215f26 100644 --- a/hapi-fhir-validation/pom.xml +++ b/hapi-fhir-validation/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-deployable-pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../hapi-deployable-pom/pom.xml diff --git a/hapi-tinder-plugin/pom.xml b/hapi-tinder-plugin/pom.xml index 9b8f94fd499..d3fa30406ab 100644 --- a/hapi-tinder-plugin/pom.xml +++ b/hapi-tinder-plugin/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/hapi-tinder-test/pom.xml b/hapi-tinder-test/pom.xml index 54bdfa07afd..db18a5dad1f 100644 --- a/hapi-tinder-test/pom.xml +++ b/hapi-tinder-test/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../pom.xml diff --git a/pom.xml b/pom.xml index b9add014547..31b2d0c455f 100644 --- a/pom.xml +++ b/pom.xml @@ -8,7 +8,7 @@ ca.uhn.hapi.fhir hapi-fhir pom - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT HAPI-FHIR An open-source implementation of the FHIR specification in Java. @@ -2623,7 +2623,7 @@ ca.uhn.hapi.fhir hapi-tinder-plugin - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT diff --git a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml index a243f5bacf7..2618b695119 100644 --- a/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml +++ b/tests/hapi-fhir-base-test-jaxrsserver-kotlin/pom.xml @@ -7,7 +7,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-client/pom.xml b/tests/hapi-fhir-base-test-mindeps-client/pom.xml index cd40c2f6ab9..1f3098c2673 100644 --- a/tests/hapi-fhir-base-test-mindeps-client/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-client/pom.xml @@ -4,7 +4,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../pom.xml diff --git a/tests/hapi-fhir-base-test-mindeps-server/pom.xml b/tests/hapi-fhir-base-test-mindeps-server/pom.xml index f16777aa666..f4c44792a09 100644 --- a/tests/hapi-fhir-base-test-mindeps-server/pom.xml +++ b/tests/hapi-fhir-base-test-mindeps-server/pom.xml @@ -5,7 +5,7 @@ ca.uhn.hapi.fhir hapi-fhir - 7.7.6-SNAPSHOT + 7.7.7-SNAPSHOT ../../pom.xml
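A brief usage sketch of the reworked identity-resolution API introduced by this patch (IIdHelperService.resolveResourceIdentity / resolveResourceIdentities plus the new ResolveIdentityMode parameter). The method signatures, the ResolveIdentityMode factory methods, and RequestPartitionId.allPartitions() are taken from the hunks above; the JpaPid persistent-ID type, the field name myIdHelperService, and the surrounding helper methods are illustrative assumptions only and are not part of the change set.

// Illustrative only -- not part of the patch. Assumes a bean with an injected
// IIdHelperService<JpaPid>; the field name myIdHelperService is hypothetical.
private IResourceLookup<JpaPid> resolveReferenceTarget(String theResourceType, String theIdPart) {
	// Reference-validation style: exclude deleted targets, and only consult the
	// cache when deletes are disabled (mirrors the DaoResourceLinkResolver hunk above).
	return myIdHelperService.resolveResourceIdentity(
			RequestPartitionId.allPartitions(),
			theResourceType,
			theIdPart,
			ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled());
}

private Map<IIdType, IResourceLookup<JpaPid>> resolveMany(Collection<IIdType> theIds) {
	// Bulk-lookup style: deleted resources are acceptable and cached results are OK
	// (mirrors the MdmSurvivorshipSvcImpl hunk above).
	return myIdHelperService.resolveResourceIdentities(
			RequestPartitionId.allPartitions(),
			theIds,
			ResolveIdentityMode.includeDeleted().cacheOk());
}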