Merge branch 'hapifhir:master' into fix_for_5.0.0-snapshot1

commit 8ad18915a1
@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -25,7 +25,7 @@ public final class Msg {
/**
* IMPORTANT: Please update the following comment after you add a new code
* Last code value: 2075
* Last code value: 2076
*/

private Msg() {}
@@ -1345,6 +1345,34 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.api.server.storage.TransactionDetails"
),

/**
* <b>Storage Hook:</b>
* Invoked before client-assigned id is created.
* <p>
* Hooks will have access to the contents of the resource being created
* so that client-assigned ids can be allowed/denied. These changes will
* be reflected in permanent storage.
* </p>
* Hooks may accept the following parameters:
* <ul>
* <li>org.hl7.fhir.instance.model.api.IBaseResource</li>
* <li>
* ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
* resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
* pulled out of the servlet request. Note that the bean
* properties are not all guaranteed to be populated, depending on how early during processing the
* exception occurred.
* </li>
* </ul>
* <p>
* Hooks should return <code>void</code>.
* </p>
*/
STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID(void.class,
"org.hl7.fhir.instance.model.api.IBaseResource",
"ca.uhn.fhir.rest.api.server.RequestDetails"
),

/**
* <b>Storage Hook:</b>
* Invoked before a resource will be updated, immediately before the resource
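For illustration, a minimal sketch of an interceptor that could subscribe to the new pointcut. This class is not part of the change set; it assumes the standard @Interceptor/@Hook registration API, and the "temp-" prefix rule is purely hypothetical:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import org.hl7.fhir.instance.model.api.IBaseResource;

@Interceptor
public class ClientAssignedIdGuard {

	// Invoked once per create that carries a client-assigned id; throwing aborts the write.
	@Hook(Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID)
	public void allowOrDenyClientAssignedId(IBaseResource theResource, RequestDetails theRequestDetails) {
		String idPart = theResource.getIdElement().getIdPart();
		if (idPart != null && idPart.startsWith("temp-")) {
			throw new ForbiddenOperationException("Client-assigned ids starting with 'temp-' are not allowed");
		}
	}
}

Such a class would be registered with the server's interceptor registry in the usual way (for example via registerInterceptor()).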
@@ -159,6 +159,7 @@ ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl.cannotUpdateUrlOrVersionForValueSetReso
ca.uhn.fhir.jpa.patch.JsonPatchUtils.failedToApplyPatch=Failed to apply JSON patch to {0}: {1}

ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices.invalidGraphqlArgument=Unknown GraphQL argument "{0}". Value GraphQL argument for this type are: {1}
ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices.invalidGraphqlCursorArgument=GraphQL Cursor "{0}" does not exist and may have expired

ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.nonDefaultPartitionSelectedForNonPartitionable=Resource type {0} can not be partitioned
ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc.unknownPartitionId=Unknown partition ID: {0}

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.batch.config;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public final class BatchConstants {

@@ -90,6 +91,8 @@ public final class BatchConstants {
*/
public static final String JOB_EXECUTION_RESOURCE_TYPE = "resourceType";

public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization");

/**
* This Set contains the step names across all job types that are appropriate for
* someone to look at the write count for that given step in order to determine the
@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -0,0 +1,4 @@
type: fix
issue: 3524
jira: SMILE-3672
title: "Previously if partitioning was enabled, the URL returned from `$export-poll-status` provided a location in the wrong partition. This has been fixed and all Bulk Exports will be stored in the `DEFAULT` partition."

@@ -0,0 +1,4 @@
type: add
issue: 3547
jira: SMILE-4141
title: "Added a new pointcut: `STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID` which is invoked when a user attempts to create a resource with a client-assigned ID."
@@ -0,0 +1,6 @@
---
type: fix
issue: 3548
jira: SMILE-4023
title: "A recent change caused searches that use _has in conjunction with a `Resource` search param (e.g. _id, _text) to fail. This has been fixed."

@@ -0,0 +1,5 @@
---
type: change
issue: 3550
title: "Method signatures on several interfaces and classes related to the `$expunge` operation have changed to support
the case where the primary identifier of a resource is not necessarily a `Long`."

@@ -0,0 +1,4 @@
type: fix
issue: 3553
jira: SMILE-3964
title: "Added a new setting to BinaryStorageInterceptor which allows you to disable binary de-externalization during resource reads."
@@ -0,0 +1,4 @@
type: add
issue: 3557
jira: SMILE-4062
title: "Group Bulk Export (e.g. Group/123/$export) now additionally supports Organization and Practitioner as valid _type parameters. This works internally by querying using a `_has` parameter"
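By way of example (not part of this change set), a Group-level kick-off request that includes the newly supported forward-reference types might look like the following, assuming the standard FHIR Bulk Data kick-off headers:

GET [base]/Group/123/$export?_type=Patient,Practitioner,Organization
Accept: application/fhir+json
Prefer: respond-async

Internally, the Practitioner and Organization resources are then located with a _has query against the expanded patient list rather than a direct patient reference.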
@@ -0,0 +1,5 @@
type: fix
issue: 3561
jira: SMILE-3728
title: "Previously, Bulk export jobs automatically cleared the collection after a hardcoded 2 hour time period from start of the job. This is now configurable via a new DaoConfig property,
`setBulkExportFileRetentionPeriodHours()`. If this is set to a value of 0 or below, then the collections will never be expired."
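A short sketch of the new setting in use. The setter name comes from the changelog entry above; the retention value is hypothetical:

DaoConfig daoConfig = new DaoConfig();
// Keep completed bulk export collections for one week instead of the previous hard-coded 2 hours.
daoConfig.setBulkExportFileRetentionPeriodHours(7 * 24);
// Setting this to 0 or below would instead leave the job expiry null, so collections are never purged.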
@@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -25,11 +25,11 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.PartitionRunner;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.SliceImpl;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

@@ -58,14 +58,14 @@ public class MdmLinkDeleter implements ItemProcessor<List<Long>, List<Long>> {
public List<Long> process(List<Long> thePidList) throws Exception {
ConcurrentLinkedQueue<Long> goldenPidAggregator = new ConcurrentLinkedQueue<>();
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getReindexBatchSize(), myDaoConfig.getReindexThreadCount());
partitionRunner.runInPartitionedThreads(thePidList, pids -> removeLinks(pids, goldenPidAggregator));
partitionRunner.runInPartitionedThreads(ResourcePersistentId.fromLongList(thePidList), pids -> removeLinks(pids, goldenPidAggregator));
return new ArrayList<>(goldenPidAggregator);
}

private void removeLinks(List<Long> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
private void removeLinks(List<ResourcePersistentId> pidList, ConcurrentLinkedQueue<Long> theGoldenPidAggregator) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);

txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(pidList)));
txTemplate.executeWithoutResult(t -> theGoldenPidAggregator.addAll(deleteMdmLinksAndReturnGoldenResourcePids(ResourcePersistentId.toLongList(pidList))));
}

public List<Long> deleteMdmLinksAndReturnGoldenResourcePids(List<Long> thePids) {
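The MdmLinkDeleter change above (and the expunge/delete-expunge changes further down) repeatedly bridge between raw Long pids and ResourcePersistentId. A minimal sketch of that round trip, using only the fromLongList/toLongList helpers already relied on in this change (pids are hypothetical values):

import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import java.util.List;

List<Long> rawPids = List.of(101L, 102L);
// Wrap each raw pid in a ResourcePersistentId for the new method signatures...
List<ResourcePersistentId> pids = ResourcePersistentId.fromLongList(rawPids);
// ...and unwrap again where the DAOs still expect Longs.
List<Long> backToLongs = ResourcePersistentId.toLongList(pids);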
|
@ -34,14 +34,18 @@ import org.apache.commons.lang3.StringUtils;
|
|||
import org.hl7.fhir.instance.model.api.IBaseExtension;
|
||||
import org.hl7.fhir.instance.model.api.IBaseReference;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.batch.item.ItemProcessor;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.lang.NonNull;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES;
|
||||
|
||||
/**
|
||||
* Reusable Item Processor which attaches an extension to any outgoing resource. This extension will contain a resource
|
||||
* reference to the golden resource patient of the given resources' patient. (e.g. Observation.subject, Immunization.patient, etc)
|
||||
|
@ -49,6 +53,7 @@ import java.util.Optional;
|
|||
public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBaseResource>, List<IBaseResource>> {
|
||||
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
|
||||
|
||||
|
||||
@Value("#{stepExecutionContext['resourceType']}")
|
||||
private String myResourceType;
|
||||
|
||||
|
@ -79,21 +84,34 @@ public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBa
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<IBaseResource> process(List<IBaseResource> theIBaseResources) throws Exception {
|
||||
//If MDM expansion is enabled, add this magic new extension, otherwise, return the resource as is.
|
||||
if (myMdmEnabled) {
|
||||
public List<IBaseResource> process(@NonNull List<IBaseResource> theIBaseResources) throws Exception {
|
||||
if (shouldAnnotateResource()) {
|
||||
lazyLoadSearchParamsAndFhirPath();
|
||||
theIBaseResources.forEach(this::annotateBackwardsReferences);
|
||||
}
|
||||
return theIBaseResources;
|
||||
}
|
||||
|
||||
private void lazyLoadSearchParamsAndFhirPath() {
|
||||
if (myRuntimeSearchParam == null) {
|
||||
populateRuntimeSearchParam();
|
||||
}
|
||||
if (myPatientFhirPath == null) {
|
||||
populatePatientFhirPath();
|
||||
}
|
||||
theIBaseResources.forEach(this::annotateClinicalResourceWithRelatedGoldenResourcePatient);
|
||||
}
|
||||
return theIBaseResources;
|
||||
}
|
||||
|
||||
private void annotateClinicalResourceWithRelatedGoldenResourcePatient(IBaseResource iBaseResource) {
|
||||
/**
|
||||
* If the resource is added via a forward-reference from a patient, e.g. Patient.managingOrganization, we have no way to fetch the patient at this point in time.
|
||||
* This is a shortcoming of including the forward reference types in a Group/Patient bulk export.
|
||||
*
|
||||
* @return true if the resource should be annotated with the golden resource patient reference
|
||||
*/
|
||||
private boolean shouldAnnotateResource() {
|
||||
return myMdmEnabled && !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(myResourceType);
|
||||
}
|
||||
|
||||
private void annotateBackwardsReferences(IBaseResource iBaseResource) {
|
||||
Optional<String> patientReference = getPatientReference(iBaseResource);
|
||||
if (patientReference.isPresent()) {
|
||||
addGoldenResourceExtension(iBaseResource, patientReference.get());
|
||||
|
@ -104,13 +122,15 @@ public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBa
|
|||
}
|
||||
|
||||
private Optional<String> getPatientReference(IBaseResource iBaseResource) {
|
||||
//In the case of patient, we will just use the raw ID.
|
||||
if (myResourceType.equalsIgnoreCase("Patient")) {
|
||||
return Optional.of(iBaseResource.getIdElement().getIdPart());
|
||||
//Otherwise, we will perform evaluation of the fhirPath.
|
||||
} else {
|
||||
Optional<IBaseReference> optionalReference = getFhirParser().evaluateFirst(iBaseResource, myPatientFhirPath, IBaseReference.class);
|
||||
if (optionalReference.isPresent()) {
|
||||
return optionalReference.map(theIBaseReference -> theIBaseReference.getReferenceElement().getIdPart());
|
||||
} else {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -38,6 +38,8 @@ import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -58,6 +60,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES;

/**
* Bulk Item reader for the Group Bulk Export job.
* Instead of performing a normal query on the resource type using type filters, we instead

@@ -87,13 +91,14 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
@Override
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
Set<ResourcePersistentId> myReadPids = new HashSet<>();

//Short circuit out if we detect we are attempting to extract patients
if (myResourceType.equalsIgnoreCase("Patient")) {
return getExpandedPatientIterator();
}

//First lets expand the group so we get a list of all patient IDs of the group, and MDM-matched patient IDs of the group.
Set<String> expandedMemberResourceIds = expandAllPatientPidsFromGroup();
if (ourLog.isDebugEnabled()) {

@@ -102,15 +107,16 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
//Next, let's search for the target resources, with their correct patient references, chunked.
//The results will be jammed into myReadPids
Set<ResourcePersistentId> myExpandedMemberPids = new HashSet<>();
QueryChunker<String> queryChunker = new QueryChunker<>();
queryChunker.chunk(new ArrayList<>(expandedMemberResourceIds), QUERY_CHUNK_SIZE, (idChunk) -> {
queryResourceTypeWithReferencesToPatients(myReadPids, idChunk);
queryResourceTypeWithReferencesToPatients(myExpandedMemberPids, idChunk);
});

if (ourLog.isDebugEnabled()) {
ourLog.debug("Resource PIDs to be Bulk Exported: [{}]", myReadPids.stream().map(ResourcePersistentId::toString).collect(Collectors.joining(",")));
ourLog.debug("Resource PIDs to be Bulk Exported: {}", myExpandedMemberPids);
}
return myReadPids.iterator();
return myExpandedMemberPids.iterator();
}

/**

@@ -243,6 +249,7 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
}

private void queryResourceTypeWithReferencesToPatients(Set<ResourcePersistentId> myReadPids, List<String> idChunk) {

//Build SP map
//First, inject the _typeFilters and _since from the export job
List<SearchParameterMap> expandedSpMaps = createSearchParameterMapsForResourceType();

@@ -251,12 +258,16 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
//Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that.
validateSearchParameters(expandedSpMap);

// Now, further filter the query with patient references defined by the chunk of IDs we have.
filterSearchByResourceIds(idChunk, expandedSpMap);

// Fetch and cache a search builder for this resource type
ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType();

// Now, further filter the query with patient references defined by the chunk of IDs we have.
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(myResourceType)) {
filterSearchByHasParam(idChunk, expandedSpMap);
} else {
filterSearchByResourceIds(idChunk, expandedSpMap);
}

//Execute query and all found pids to our local iterator.
IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions());
while (resultIterator.hasNext()) {

@@ -265,17 +276,41 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
}
}

/**
*
* @param idChunk
* @param expandedSpMap
*/
private void filterSearchByHasParam(List<String> idChunk, SearchParameterMap expandedSpMap) {
HasOrListParam hasOrListParam = new HasOrListParam();
idChunk.stream().forEach(id -> hasOrListParam.addOr(buildHasParam(id)));
expandedSpMap.add("_has", hasOrListParam);
}

private HasParam buildHasParam(String theId) {
if ("Practitioner".equalsIgnoreCase(myResourceType)) {
return new HasParam("Patient", "general-practitioner", "_id", theId);
} else if ("Organization".equalsIgnoreCase(myResourceType)) {
return new HasParam("Patient", "organization", "_id", theId);
} else {
throw new IllegalArgumentException(Msg.code(2077) + " We can't handle forward references onto type " + myResourceType);
}
}

private void filterSearchByResourceIds(List<String> idChunk, SearchParameterMap expandedSpMap) {
ReferenceOrListParam orList = new ReferenceOrListParam();
idChunk.forEach(id -> orList.add(new ReferenceParam(id)));
expandedSpMap.add(getPatientSearchParamForCurrentResourceType().getName(), orList);
}

private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) {
private void validateSearchParameters(SearchParameterMap expandedSpMap) {
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(myResourceType)) {
return;
} else {
RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
throw new IllegalArgumentException(Msg.code(792) + String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
}
return runtimeSearchParam;
}
}
}
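Expressed as plain FHIR searches, the filter that buildHasParam() assembles for a chunk of patient ids is roughly equivalent to the following (ids are illustrative only):

Practitioner?_has:Patient:general-practitioner:_id=p1,p2,p3
Organization?_has:Patient:organization:_id=p1,p2,p3

That is, fetch the Practitioners (or Organizations) that some patient in the current chunk points at via its general-practitioner (or managing organization) forward reference.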
@@ -200,7 +200,7 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
Optional<BulkExportJobEntity> jobToDelete = myTxTemplate.execute(t -> {
Pageable page = PageRequest.of(0, 1);
Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByExpiry(page, new Date());
Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findNotRunningByExpiry(page, new Date());
if (submittedJobs.isEmpty()) {
return Optional.empty();
}
@@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;

@@ -78,10 +79,11 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private DaoRegistry myDaoRegistry;
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;

private Set<String> myCompartmentResources;

private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR);

@Transactional
@Override

@@ -233,8 +235,18 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
.setStatus(theJob.getStatus());
}

/**
* If the retention period is set to <= 0, set it to null, which prevents it from getting expired, otherwise, set
* the retention period.
*
* @param theJob the job to update the expiry for.
*/
private void updateExpiry(BulkExportJobEntity theJob) {
theJob.setExpiry(DateUtils.addMilliseconds(new Date(), myRetentionPeriod));
if (myDaoConfig.getBulkExportFileRetentionPeriodHours() > 0) {
theJob.setExpiry(DateUtils.addHours(new Date(), myDaoConfig.getBulkExportFileRetentionPeriodHours()));
} else {
theJob.setExpiry(null);
}
}

@Transactional
@@ -139,6 +139,7 @@ import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.mdm.api.IMdmClearJobSubmitter;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;

@@ -261,8 +262,11 @@ public class JpaConfig {

@Bean(name = "myBinaryStorageInterceptor")
@Lazy
public BinaryStorageInterceptor binaryStorageInterceptor() {
return new BinaryStorageInterceptor();
public BinaryStorageInterceptor binaryStorageInterceptor(DaoConfig theDaoConfig) {
BinaryStorageInterceptor interceptor = new BinaryStorageInterceptor();
interceptor.setAllowAutoInflateBinaries(theDaoConfig.isAllowAutoInflateBinaries());
interceptor.setAutoInflateBinariesMaximumSize(theDaoConfig.getAutoInflateBinariesMaximumBytes());
return interceptor;
}

@Bean

@@ -754,8 +758,8 @@ public class JpaConfig {

@Bean
@Scope("prototype")
public ExpungeOperation expungeOperation(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
return new ExpungeOperation(theResourceName, theResourceId, theVersion, theExpungeOptions, theRequestDetails);
public ExpungeOperation expungeOperation(String theResourceName, ResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
return new ExpungeOperation(theResourceName, theResourceId, theExpungeOptions, theRequestDetails);
}

@Bean
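A sketch of driving the binaryStorageInterceptor bean wiring above from configuration. The DaoConfig getters appear in the change itself; the matching setters used here are assumed, and the size cap is a hypothetical value:

DaoConfig daoConfig = new DaoConfig();
// Turn off automatic re-inflation (de-externalization) of externalized binaries during resource reads...
daoConfig.setAllowAutoInflateBinaries(false);
// ...or leave it on and cap the size (in bytes) of binaries that will be inflated inline.
daoConfig.setAutoInflateBinariesMaximumBytes(512 * 1024);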
@@ -77,6 +77,7 @@ import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@@ -1422,7 +1423,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
*/
if (thePerformIndexing) {
if (newParams == null) {
myExpungeService.deleteAllSearchParams(entity.getId());
myExpungeService.deleteAllSearchParams(new ResourcePersistentId(entity.getId()));
} else {

// Synchronize search param indexes
@@ -70,6 +70,7 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.ValidationModeEnum;

@@ -310,17 +311,28 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
notifyInterceptors(RestOperationTypeEnum.CREATE, requestDetails);
}

String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
boolean resourceIdWasServerAssigned = theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE;

HookParams hookParams;

// Notify interceptor for accepting/rejecting client assigned ids
if (!resourceIdWasServerAssigned && resourceHadIdBeforeStorage) {
hookParams = new HookParams()
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequest);
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID, hookParams);
}

// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_CREATED
HookParams hookParams = new HookParams()
hookParams = new HookParams()
.add(IBaseResource.class, theResource)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(TransactionDetails.class, theTransactionDetails);
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, hookParams);

String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
boolean resourceIdWasServerAssigned = theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE;
if (resourceHadIdBeforeStorage && !resourceIdWasServerAssigned) {
validateResourceIdCreation(theResource, theRequest);
}

@@ -887,10 +899,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new PreconditionFailedException(Msg.code(969) + "Can not perform version-specific expunge of resource " + theId.toUnqualified().getValue() + " as this is the current version");
}

return myExpungeService.expunge(getResourceName(), entity.getResourceId(), entity.getVersion(), theExpungeOptions, theRequest);
return myExpungeService.expunge(getResourceName(), new ResourcePersistentId(entity.getResourceId(), entity.getVersion()), theExpungeOptions, theRequest);
}

return myExpungeService.expunge(getResourceName(), entity.getResourceId(), null, theExpungeOptions, theRequest);
return myExpungeService.expunge(getResourceName(), new ResourcePersistentId(entity.getResourceId()), theExpungeOptions, theRequest);
}

@Override

@@ -898,7 +910,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());

return myExpungeService.expunge(getResourceName(), null, null, theExpungeOptions, theRequestDetails);
return myExpungeService.expunge(getResourceName(), null, theExpungeOptions, theRequestDetails);
}

@Override
@@ -83,7 +83,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
@Override
@Transactional(propagation = Propagation.NEVER)
public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
return myExpungeService.expunge(null, null, null, theExpungeOptions, theRequestDetails);
return myExpungeService.expunge(null, null, theExpungeOptions, theRequestDetails);
}

@Transactional(propagation = Propagation.REQUIRED)
@@ -43,6 +43,9 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Lo
@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);

@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry IS NOT NULL and j.myExpiry < :cutoff AND j.myStatus <> 'BUILDING'")
Slice<BulkExportJobEntity> findNotRunningByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);

@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus);
@@ -48,6 +48,7 @@ import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.IdDt;

@@ -125,15 +126,15 @@ public class ResourceExpungeService implements IResourceExpungeService {

@Override
@Transactional
public List<Long> findHistoricalVersionsOfNonDeletedResources(String theResourceName, Long theResourceId, Long theVersion, int theRemainingCount) {
public List<ResourcePersistentId> findHistoricalVersionsOfNonDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
Pageable page = PageRequest.of(0, theRemainingCount);

Slice<Long> ids;
if (theResourceId != null) {
if (theVersion != null) {
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourceId, theVersion));
if (theResourceId != null && theResourceId.getId() != null) {
if (theResourceId.getVersion() != null) {
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourceId.getIdAsLong(), theResourceId.getVersion()));
} else {
ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theResourceId);
ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theResourceId.getIdAsLong());
}
} else {
if (theResourceName != null) {

@@ -143,16 +144,16 @@ public class ResourceExpungeService implements IResourceExpungeService {
}
}

return ids.getContent();
return ResourcePersistentId.fromLongList(ids.getContent());
}

@Override
@Transactional
public List<Long> findHistoricalVersionsOfDeletedResources(String theResourceName, Long theResourceId, int theRemainingCount) {
public List<ResourcePersistentId> findHistoricalVersionsOfDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theRemainingCount) {
Pageable page = PageRequest.of(0, theRemainingCount);
Slice<Long> ids;
if (theResourceId != null) {
ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId.getIdAsLong(), theResourceName);
ourLog.info("Expunging {} deleted resources of type[{}] and ID[{}]", ids.getNumberOfElements(), theResourceName, theResourceId);
} else {
if (theResourceName != null) {

@@ -163,14 +164,14 @@ public class ResourceExpungeService implements IResourceExpungeService {
ourLog.info("Expunging {} deleted resources (all types)", ids.getNumberOfElements());
}
}
return ids.getContent();
return ResourcePersistentId.fromLongList(ids.getContent());
}

@Override
@Transactional
public void expungeCurrentVersionOfResources(RequestDetails theRequestDetails, List<Long> theResourceIds, AtomicInteger theRemainingCount) {
for (Long next : theResourceIds) {
expungeCurrentVersionOfResource(theRequestDetails, next, theRemainingCount);
public void expungeCurrentVersionOfResources(RequestDetails theRequestDetails, List<ResourcePersistentId> theResourceIds, AtomicInteger theRemainingCount) {
for (ResourcePersistentId next : theResourceIds) {
expungeCurrentVersionOfResource(theRequestDetails, next.getIdAsLong(), theRemainingCount);
if (theRemainingCount.get() <= 0) {
return;
}

@@ -226,9 +227,9 @@ public class ResourceExpungeService implements IResourceExpungeService {

@Override
@Transactional
public void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<Long> theResourceIds, AtomicInteger theRemainingCount) {
for (Long next : theResourceIds) {
expungeHistoricalVersionsOfId(theRequestDetails, next, theRemainingCount);
public void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<ResourcePersistentId> theResourceIds, AtomicInteger theRemainingCount) {
for (ResourcePersistentId next : theResourceIds) {
expungeHistoricalVersionsOfId(theRequestDetails, next.getIdAsLong(), theRemainingCount);
if (theRemainingCount.get() <= 0) {
return;
}

@@ -237,9 +238,9 @@ public class ResourceExpungeService implements IResourceExpungeService {

@Override
@Transactional
public void expungeHistoricalVersions(RequestDetails theRequestDetails, List<Long> theHistoricalIds, AtomicInteger theRemainingCount) {
for (Long next : theHistoricalIds) {
expungeHistoricalVersion(theRequestDetails, next, theRemainingCount);
public void expungeHistoricalVersions(RequestDetails theRequestDetails, List<ResourcePersistentId> theHistoricalIds, AtomicInteger theRemainingCount) {
for (ResourcePersistentId next : theHistoricalIds) {
expungeHistoricalVersion(theRequestDetails, next.getIdAsLong(), theRemainingCount);
if (theRemainingCount.get() <= 0) {
return;
}

@@ -256,7 +257,7 @@ public class ResourceExpungeService implements IResourceExpungeService {

ourLog.info("Expunging current version of resource {}", resource.getIdDt().getValue());

deleteAllSearchParams(resource.getResourceId());
deleteAllSearchParams(new ResourcePersistentId(resource.getResourceId()));
resource.getTags().clear();

if (resource.getForcedId() != null) {

@@ -271,48 +272,48 @@ public class ResourceExpungeService implements IResourceExpungeService {

@Override
@Transactional
public void deleteAllSearchParams(Long theResourceId) {
ResourceTable resource = myResourceTableDao.findById(theResourceId).orElse(null);
public void deleteAllSearchParams(ResourcePersistentId theResourceId) {
ResourceTable resource = myResourceTableDao.findById(theResourceId.getIdAsLong()).orElse(null);

if (resource == null || resource.isParamsUriPopulated()) {
myResourceIndexedSearchParamUriDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamUriDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsCoordsPopulated()) {
myResourceIndexedSearchParamCoordsDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamCoordsDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsDatePopulated()) {
myResourceIndexedSearchParamDateDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamDateDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsNumberPopulated()) {
myResourceIndexedSearchParamNumberDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamNumberDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsQuantityPopulated()) {
myResourceIndexedSearchParamQuantityDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamQuantityDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsQuantityNormalizedPopulated()) {
myResourceIndexedSearchParamQuantityNormalizedDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamQuantityNormalizedDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsStringPopulated()) {
myResourceIndexedSearchParamStringDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamStringDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsTokenPopulated()) {
myResourceIndexedSearchParamTokenDao.deleteByResourceId(theResourceId);
myResourceIndexedSearchParamTokenDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsComboStringUniquePresent()) {
myResourceIndexedCompositeStringUniqueDao.deleteByResourceId(theResourceId);
myResourceIndexedCompositeStringUniqueDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isParamsComboTokensNonUniquePresent()) {
myResourceIndexedComboTokensNonUniqueDao.deleteByResourceId(theResourceId);
myResourceIndexedComboTokensNonUniqueDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
mySearchParamPresentDao.deleteByResourceId(theResourceId);
mySearchParamPresentDao.deleteByResourceId(theResourceId.getIdAsLong());
}
if (resource == null || resource.isHasLinks()) {
myResourceLinkDao.deleteByResourceId(theResourceId);
myResourceLinkDao.deleteByResourceId(theResourceId.getIdAsLong());
}

if (resource == null || resource.isHasTags()) {
myResourceTagDao.deleteByResourceId(theResourceId);
myResourceTagDao.deleteByResourceId(theResourceId.getIdAsLong());
}
}
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.dao.search;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import org.hibernate.search.engine.backend.common.DocumentReference;
import org.hibernate.search.engine.search.query.SearchScroll;
@@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;
import ca.uhn.fhir.jpa.dao.expunge.ResourceTableFKProvider;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -84,10 +85,11 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
return;
}

List<ResourcePersistentId> targetPidsAsResourceIds = ResourcePersistentId.fromLongList(thePids);
List<ResourceLink> conflictResourceLinks = Collections.synchronizedList(new ArrayList<>());
PartitionRunner partitionRunner = new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
Consumer<List<Long>> listConsumer = someTargetPids -> findResourceLinksWithTargetPidIn(thePids, someTargetPids, conflictResourceLinks);
partitionRunner.runInPartitionedThreads(thePids, listConsumer);
Consumer<List<ResourcePersistentId>> listConsumer = someTargetPids -> findResourceLinksWithTargetPidIn(targetPidsAsResourceIds, someTargetPids, conflictResourceLinks);
partitionRunner.runInPartitionedThreads(targetPidsAsResourceIds, listConsumer);

if (conflictResourceLinks.isEmpty()) {
return;

@@ -105,14 +107,16 @@ public class DeleteExpungeProcessor implements ItemProcessor<List<Long>, List<St
targetResourceId + " because " + sourceResourceId + " refers to it via the path " + firstConflict.getSourcePath());
}

public void findResourceLinksWithTargetPidIn(List<Long> theAllTargetPids, List<Long> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
public void findResourceLinksWithTargetPidIn(List<ResourcePersistentId> theAllTargetPids, List<ResourcePersistentId> theSomeTargetPids, List<ResourceLink> theConflictResourceLinks) {
List<Long> allTargetPidsAsLongs = ResourcePersistentId.toLongList(theAllTargetPids);
List<Long> someTargetPidsAsLongs = ResourcePersistentId.toLongList(theSomeTargetPids);
// We only need to find one conflict, so if we found one already in an earlier partition run, we can skip the rest of the searches
if (theConflictResourceLinks.isEmpty()) {
List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(theSomeTargetPids).stream()
List<ResourceLink> conflictResourceLinks = myResourceLinkDao.findWithTargetPidIn(someTargetPidsAsLongs).stream()
// Filter out resource links for which we are planning to delete the source.
// theAllTargetPids contains a list of all the pids we are planning to delete. So we only want
// to consider a link to be a conflict if the source of that link is not in theAllTargetPids.
.filter(link -> !theAllTargetPids.contains(link.getSourceResourcePid()))
.filter(link -> !allTargetPidsAsLongs.contains(link.getSourceResourcePid()))
.collect(Collectors.toList());

// We do this in two steps to avoid lock contention on this synchronized list
@@ -78,7 +78,7 @@ public class BulkExportJobEntity implements Serializable {
@Column(name = "STATUS_TIME", nullable = false)
private Date myStatusTime;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "EXP_TIME", nullable = false)
@Column(name = "EXP_TIME", nullable = true)
private Date myExpiry;
@Column(name = "REQUEST", nullable = false, length = REQUEST_LENGTH)
private String myRequest;

@@ -146,7 +146,7 @@ public class BulkExportJobEntity implements Serializable {
if (myStatus != null) {
b.append("status", myStatus + " " + new InstantType(myStatusTime).getValueAsString());
}
b.append("created", new InstantType(myExpiry).getValueAsString());
b.append("created", new InstantType(myCreated).getValueAsString());
b.append("expiry", new InstantType(myExpiry).getValueAsString());
b.append("request", myRequest);
b.append("since", mySince);
@@ -289,6 +289,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.addColumn("20220416.1", "CUR_GATED_STEP_ID")
.nullable()
.type(ColumnTypeEnum.STRING, 100);

//Make Job expiry nullable so that we can prevent job expiry by using a null value.
version
.onTable("HFJ_BLK_EXPORT_JOB").modifyColumn("20220423.1", "EXP_TIME").nullable().withType(ColumnTypeEnum.DATE_TIMESTAMP);

}

/**
@@ -546,17 +546,12 @@ public class QueryStack {
//Ensure that the name of the search param
// (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val)
// exists on the target resource type.
RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramName);
if (owningParameterDef == null) {
throw new InvalidRequestException(Msg.code(1209) + "Unknown parameter name: " + targetResourceType + ':' + parameterName);
}
RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramName);

//Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in Patient?_has:Observation:subject:code=sys|val)
//exists on the target resource.
RuntimeSearchParam joiningParameterDef = mySearchParamRegistry.getActiveSearchParam(targetResourceType, paramReference);
if (joiningParameterDef == null) {
throw new InvalidRequestException(Msg.code(1210) + "Unknown parameter name: " + targetResourceType + ':' + paramReference);
}
//exists on the target resource, or in the top-level Resource resource.
mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramReference);

IQueryParameterAnd<?> parsedParam = JpaParamUtil.parseQueryParams(mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters);
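The practical effect of the relaxed lookup above (see also the changelog entry for issue 3548) is that the parameter named inside a _has clause may now be one of the search parameters defined on the base Resource type, such as _id or _text. An illustrative search of that shape:

Patient?_has:Group:member:_id=my-group-id

Previously a query like this could be rejected with an "Unknown parameter name" error because _id is defined on Resource rather than on the target type itself.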
@@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.search.builder;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.apache.commons.lang3.Validate;
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -131,12 +131,12 @@ public class HibernateSearchIndexWriter {
nestedQtyNode.addValue(QTY_SYSTEM, theValue.getSystem());
nestedQtyNode.addValue(QTY_VALUE, theValue.getValue());

if ( ! myModelConfig.getNormalizedQuantitySearchLevel().storageOrSearchSupported()) { return; }
if ( ! myModelConfig.getNormalizedQuantitySearchLevel().storageOrSearchSupported()) { continue; }

//-- convert the value/unit to the canonical form if any
Pair canonicalForm = UcumServiceUtil.getCanonicalForm(theValue.getSystem(),
BigDecimal.valueOf(theValue.getValue()), theValue.getCode());
if (canonicalForm == null) { return; }
if (canonicalForm == null) { continue; }

double canonicalValue = Double.parseDouble(canonicalForm.getValue().asDecimal());
String canonicalUnits = canonicalForm.getCode();
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -49,9 +49,14 @@ import org.hl7.fhir.r4.model.Group;
|
|||
import org.hl7.fhir.r4.model.Immunization;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Practitioner;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -63,6 +68,7 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -74,6 +80,7 @@ import java.util.stream.Stream;
|
|||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
@ -127,10 +134,40 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPurgeExpiredJobs() {
|
||||
|
||||
// Create an expired job
|
||||
/**
|
||||
* Returns parameters in the following order:
|
||||
*
|
||||
* 1. Bulk Job status
|
||||
* 2. Expiry Date that should be set on the job
|
||||
* 3. How many jobs should be left after running a purge pass.
|
||||
*/
|
||||
static Stream<Arguments> bulkExpiryStatusProvider() {
|
||||
Date previousTime = DateUtils.addHours(new Date(), -1);
|
||||
Date futureTime = DateUtils.addHours(new Date(), 50);
|
||||
|
||||
return Stream.of(
|
||||
//Finished jobs with standard expiries.
|
||||
Arguments.of(BulkExportJobStatusEnum.COMPLETE, previousTime, 0),
|
||||
Arguments.of(BulkExportJobStatusEnum.COMPLETE, futureTime, 1),
|
||||
|
||||
//Finished job with null expiry
|
||||
Arguments.of(BulkExportJobStatusEnum.COMPLETE, null, 1),
|
||||
|
||||
//Expired errored job.
|
||||
Arguments.of(BulkExportJobStatusEnum.ERROR, previousTime, 0),
|
||||
|
||||
//Unexpired errored job.
|
||||
Arguments.of(BulkExportJobStatusEnum.ERROR, futureTime, 1),
|
||||
|
||||
//Expired job but currently still running.
|
||||
Arguments.of(BulkExportJobStatusEnum.BUILDING, previousTime, 1)
|
||||
);
|
||||
}
|
||||
@ParameterizedTest
|
||||
@MethodSource("bulkExpiryStatusProvider")
|
||||
public void testBulkExportExpiryRules(BulkExportJobStatusEnum theStatus, Date theExpiry, int theExpectedCountAfterPurge) {
|
||||
|
||||
runInTransaction(() -> {
|
||||
|
||||
Binary b = new Binary();
|
||||
|
@ -138,8 +175,8 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
String binaryId = myBinaryDao.create(b, new SystemRequestDetails()).getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
BulkExportJobEntity job = new BulkExportJobEntity();
|
||||
job.setStatus(BulkExportJobStatusEnum.COMPLETE);
|
||||
job.setExpiry(DateUtils.addHours(new Date(), -1));
|
||||
job.setStatus(theStatus);
|
||||
job.setExpiry(theExpiry);
|
||||
job.setJobId(UUID.randomUUID().toString());
|
||||
job.setCreated(new Date());
|
||||
job.setRequest("$export");
|
||||
|
@ -156,7 +193,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
file.setCollection(collection);
|
||||
file.setResource(binaryId);
|
||||
myBulkExportCollectionFileDao.save(file);
|
||||
|
||||
});
|
||||
|
||||
// Check that things were created
|
||||
|
@ -170,14 +206,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
// Run a purge pass
|
||||
myBulkDataExportJobSchedulingHelper.purgeExpiredFiles();
|
||||
|
||||
// Check that things were deleted
|
||||
// Check that the correct resources remain, based on the rules supplied by the method provider.
|
||||
runInTransaction(() -> {
|
||||
assertEquals(0, myResourceTableDao.count());
|
||||
assertThat(myBulkExportJobDao.findAll(), Matchers.empty());
|
||||
assertEquals(0, myBulkExportCollectionDao.count());
|
||||
assertEquals(0, myBulkExportCollectionFileDao.count());
|
||||
assertEquals(theExpectedCountAfterPurge, myResourceTableDao.count());
|
||||
assertEquals(theExpectedCountAfterPurge, myBulkExportJobDao.findAll().size());
|
||||
assertEquals(theExpectedCountAfterPurge, myBulkExportCollectionDao.count());
|
||||
assertEquals(theExpectedCountAfterPurge, myBulkExportCollectionFileDao.count());
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -423,7 +458,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
// Fetch the job again
|
||||
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
|
||||
assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
|
||||
assertEquals(5, status.getFiles().size());
|
||||
assertEquals(7, status.getFiles().size());
|
||||
|
||||
// Iterate over the files
|
||||
for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
|
||||
|
@ -443,6 +478,12 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
} else if ("CareTeam".equals(next.getResourceType())) {
|
||||
assertThat(nextContents, containsString("\"id\":\"CT0\""));
|
||||
assertEquals(16, nextContents.split("\n").length);
|
||||
} else if ("Practitioner".equals(next.getResourceType())) {
|
||||
assertThat(nextContents, containsString("\"id\":\"PRACT0\""));
|
||||
assertEquals(11, nextContents.split("\n").length);
|
||||
} else if ("Organization".equals(next.getResourceType())) {
|
||||
assertThat(nextContents, containsString("\"id\":\"ORG0\""));
|
||||
assertEquals(11, nextContents.split("\n").length);
|
||||
} else if ("Group".equals(next.getResourceType())) {
|
||||
assertThat(nextContents, containsString("\"id\":\"G0\""));
|
||||
assertEquals(1, nextContents.split("\n").length);
|
||||
|
@ -702,7 +743,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
bulkDataExportOptions.setSince(null);
|
||||
bulkDataExportOptions.setFilters(null);
|
||||
bulkDataExportOptions.setGroupId(myPatientGroupId);
|
||||
bulkDataExportOptions.setExpandMdm(true);
|
||||
bulkDataExportOptions.setExpandMdm(false);
|
||||
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
|
||||
|
||||
|
@ -727,6 +768,58 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
assertThat(nextContents, is(containsString("IMM8")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupBatchJobFindsForwardReferencesIfNeeded() {
|
||||
createResources();
|
||||
|
||||
// Create a bulk job
|
||||
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
|
||||
bulkDataExportOptions.setOutputFormat(null);
|
||||
bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Practitioner","Organization", "Observation"));
|
||||
bulkDataExportOptions.setSince(null);
|
||||
bulkDataExportOptions.setFilters(null);
|
||||
bulkDataExportOptions.setGroupId(myPatientGroupId);
|
||||
bulkDataExportOptions.setExpandMdm(false);
|
||||
//FIXME GGG Make sure this works with MDM Enabled as well.
|
||||
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
|
||||
|
||||
myBulkDataExportJobSchedulingHelper.startSubmittedJobs();
|
||||
awaitAllBulkJobCompletions();
|
||||
|
||||
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
|
||||
|
||||
assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
|
||||
assertThat(jobInfo.getFiles().size(), equalTo(3));
|
||||
|
||||
// Iterate over the files
|
||||
String nextContents = getBinaryContents(jobInfo, 0);
|
||||
|
||||
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Practitioner")));
|
||||
assertThat(nextContents, is(containsString("PRACT0")));
|
||||
assertThat(nextContents, is(containsString("PRACT2")));
|
||||
assertThat(nextContents, is(containsString("PRACT4")));
|
||||
assertThat(nextContents, is(containsString("PRACT6")));
|
||||
assertThat(nextContents, is(containsString("PRACT8")));
|
||||
|
||||
nextContents = getBinaryContents(jobInfo, 1);
|
||||
assertThat(jobInfo.getFiles().get(1).getResourceType(), is(equalTo("Organization")));
|
||||
assertThat(nextContents, is(containsString("ORG0")));
|
||||
assertThat(nextContents, is(containsString("ORG2")));
|
||||
assertThat(nextContents, is(containsString("ORG4")));
|
||||
assertThat(nextContents, is(containsString("ORG6")));
|
||||
assertThat(nextContents, is(containsString("ORG8")));
|
||||
|
||||
//Ensure _backwards_ references still work
|
||||
nextContents = getBinaryContents(jobInfo, 2);
|
||||
assertThat(jobInfo.getFiles().get(2).getResourceType(), is(equalTo("Observation")));
|
||||
assertThat(nextContents, is(containsString("OBS0")));
|
||||
assertThat(nextContents, is(containsString("OBS2")));
|
||||
assertThat(nextContents, is(containsString("OBS4")));
|
||||
assertThat(nextContents, is(containsString("OBS6")));
|
||||
assertThat(nextContents, is(containsString("OBS8")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() {
|
||||
createResources();
|
||||
|
@ -867,6 +960,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
assertThat(nextContents, is(containsString("CT4")));
|
||||
assertThat(nextContents, is(containsString("CT6")));
|
||||
assertThat(nextContents, is(containsString("CT8")));
|
||||
|
||||
//These should be brought in via MDM.
|
||||
assertThat(nextContents, is(containsString("CT1")));
|
||||
assertThat(nextContents, is(containsString("CT3")));
|
||||
assertThat(nextContents, is(containsString("CT5")));
|
||||
assertThat(nextContents, is(containsString("CT7")));
|
||||
assertThat(nextContents, is(containsString("CT9")));
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@ -1209,13 +1310,23 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
private void createResources() {
|
||||
SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
|
||||
Group group = new Group();
|
||||
group.setId("G0");
|
||||
|
||||
//Manually create a Practitioner
|
||||
IIdType goldenPractId = createPractitionerWithIndex(999);
|
||||
|
||||
//Manually create an Organization
|
||||
IIdType goldenOrgId = createOrganizationWithIndex(999);
|
||||
|
||||
//Manually create a golden record
|
||||
Patient goldenPatient = new Patient();
|
||||
|
||||
goldenPatient.setId("PAT999");
|
||||
SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
|
||||
goldenPatient.setGeneralPractitioner(Collections.singletonList(new Reference(goldenPractId.toVersionless())));
|
||||
goldenPatient.setManagingOrganization(new Reference(goldenOrgId.toVersionless()));
|
||||
|
||||
DaoMethodOutcome g1Outcome = myPatientDao.update(goldenPatient, srd);
|
||||
Long goldenPid = runInTransaction(() -> myIdHelperService.getPidOrNull(g1Outcome.getResource()));
|
||||
|
||||
|
@ -1225,7 +1336,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
createCareTeamWithIndex(999, g1Outcome.getId());
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
DaoMethodOutcome patientOutcome = createPatientWithIndex(i);
|
||||
IIdType orgId = createOrganizationWithIndex(i);
|
||||
IIdType practId = createPractitionerWithIndex(i);
|
||||
DaoMethodOutcome patientOutcome = createPatientWithIndexAndGPAndManagingOrganization(i, practId, orgId);
|
||||
IIdType patId = patientOutcome.getId().toUnqualifiedVersionless();
|
||||
Long sourcePid = runInTransaction(() -> myIdHelperService.getPidOrNull(patientOutcome.getResource()));
|
||||
|
||||
|
@ -1254,7 +1367,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
//Create some non-group patients MDM-linked to a different golden resource. They shouldn't be included in the query.
|
||||
for (int i = 0; i < 5; i++) {
|
||||
int index = 1000 + i;
|
||||
DaoMethodOutcome patientOutcome = createPatientWithIndex(index);
|
||||
IIdType orgId = createOrganizationWithIndex(i);
|
||||
IIdType practId = createPractitionerWithIndex(i);
|
||||
DaoMethodOutcome patientOutcome = createPatientWithIndexAndGPAndManagingOrganization(index, practId, orgId);
|
||||
IIdType patId = patientOutcome.getId().toUnqualifiedVersionless();
|
||||
Long sourcePid = runInTransaction(() -> myIdHelperService.getPidOrNull(patientOutcome.getResource()));
|
||||
linkToGoldenResource(goldenPid2, sourcePid);
|
||||
|
@ -1271,12 +1386,26 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
private DaoMethodOutcome createPatientWithIndex(int i) {
|
||||
private IIdType createPractitionerWithIndex(int theIndex) {
|
||||
Practitioner pract = new Practitioner();
|
||||
pract.setId("PRACT" + theIndex);
|
||||
return myPractitionerDao.update(pract, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition())).getId();
|
||||
}
|
||||
|
||||
private IIdType createOrganizationWithIndex(int theIndex) {
|
||||
Organization org = new Organization();
|
||||
org.setId("ORG" + theIndex);
|
||||
return myOrganizationDao.update(org, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition())).getId();
|
||||
}
|
||||
|
||||
private DaoMethodOutcome createPatientWithIndexAndGPAndManagingOrganization(int theIndex, IIdType thePractId, IIdType theOrgId) {
|
||||
Patient patient = new Patient();
|
||||
patient.setId("PAT" + i);
|
||||
patient.setGender(i % 2 == 0 ? Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE);
|
||||
patient.addName().setFamily("FAM" + i);
|
||||
patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i);
|
||||
patient.setId("PAT" + theIndex);
|
||||
patient.setGender(theIndex % 2 == 0 ? Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE);
|
||||
patient.addName().setFamily("FAM" + theIndex);
|
||||
patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + theIndex);
|
||||
patient.setManagingOrganization(new Reference(theOrgId.toVersionless()));
|
||||
patient.setGeneralPractitioner(Collections.singletonList(new Reference(thePractId.toVersionless())));
|
||||
return myPatientDao.update(patient, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition()));
|
||||
}
|
||||
|
||||
|
|
|
@ -499,7 +499,7 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
|
|||
myPatientDao.search(params);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals(Msg.code(1210) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
assertEquals(Msg.code(1209) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaDstu3Test;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.test.concurrency.PointcutLatch;
|
||||
import org.hl7.fhir.dstu3.model.Patient;
|
||||
|
@ -61,7 +62,7 @@ public class ExpungeHookTest extends BaseJpaDstu3Test {
|
|||
myEverythingLatch.setExpectedCount(1);
|
||||
ExpungeOptions options = new ExpungeOptions();
|
||||
options.setExpungeEverything(true);
|
||||
myExpungeService.expunge(null, null, null, options, null);
|
||||
myExpungeService.expunge(null, null, options, null);
|
||||
myEverythingLatch.awaitExpected();
|
||||
|
||||
assertPatientGone(id);
|
||||
|
@ -94,7 +95,7 @@ public class ExpungeHookTest extends BaseJpaDstu3Test {
|
|||
|
||||
// Expunge everything with the service.
|
||||
myEverythingLatch.setExpectedCount(1);
|
||||
myExpungeService.expunge(null, null, null, options, mySrd);
|
||||
myExpungeService.expunge(null, null, options, mySrd);
|
||||
myEverythingLatch.awaitExpected();
|
||||
assertPatientGone(id);
|
||||
|
||||
|
@ -122,7 +123,7 @@ public class ExpungeHookTest extends BaseJpaDstu3Test {
|
|||
options.setExpungeDeletedResources(true);
|
||||
|
||||
myExpungeResourceLatch.setExpectedCount(2);
|
||||
myExpungeService.expunge("Patient", expungeId.getIdPartAsLong(), null, options, null);
|
||||
myExpungeService.expunge("Patient", new ResourcePersistentId(expungeId.getIdPartAsLong()), options, null);
|
||||
HookParams hookParams = myExpungeResourceLatch.awaitExpected().get(0);
|
||||
|
||||
IIdType hookId = hookParams.get(IIdType.class);
|
||||
|
|
|
@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.test.concurrency.PointcutLatch;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
|
@ -34,8 +35,8 @@ public class PartitionRunnerTest {
|
|||
|
||||
@Test
|
||||
public void emptyList() {
|
||||
List<Long> resourceIds = buildPidList(0);
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(0);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(0);
|
||||
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
|
@ -54,19 +55,19 @@ public class PartitionRunnerTest {
|
|||
return new PartitionRunner("TEST", "test", theBatchSize, theThreadCount);
|
||||
}
|
||||
|
||||
private List<Long> buildPidList(int size) {
|
||||
List<Long> list = new ArrayList<>();
|
||||
private List<ResourcePersistentId> buildPidList(int size) {
|
||||
List<ResourcePersistentId> list = new ArrayList<>();
|
||||
for (long i = 0; i < size; ++i) {
|
||||
list.add(i + 1);
|
||||
list.add(new ResourcePersistentId(i + 1));
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void oneItem() throws InterruptedException {
|
||||
List<Long> resourceIds = buildPidList(1);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(1);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(1);
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
|
||||
|
@ -77,9 +78,9 @@ public class PartitionRunnerTest {
|
|||
|
||||
@Test
|
||||
public void twoItems() throws InterruptedException {
|
||||
List<Long> resourceIds = buildPidList(2);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(2);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(1);
|
||||
getPartitionRunner().runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
PartitionCall partitionCall = (PartitionCall) PointcutLatch.getLatchInvocationParameter(myLatch.awaitExpected());
|
||||
|
@ -89,9 +90,9 @@ public class PartitionRunnerTest {
|
|||
|
||||
@Test
|
||||
public void tenItemsBatch5() throws InterruptedException {
|
||||
List<Long> resourceIds = buildPidList(10);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(10);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
|
@ -106,13 +107,13 @@ public class PartitionRunnerTest {
|
|||
|
||||
@Test
|
||||
public void nineItemsBatch5() throws InterruptedException {
|
||||
List<Long> resourceIds = buildPidList(9);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(9);
|
||||
|
||||
// We don't care in which order, but one partition size should be
|
||||
// 5 and one should be 4
|
||||
Set<Integer> nums = Sets.newHashSet(5, 4);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
getPartitionRunner(5).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
|
@ -127,9 +128,9 @@ public class PartitionRunnerTest {
|
|||
|
||||
@Test
|
||||
public void tenItemsOneThread() throws InterruptedException {
|
||||
List<Long> resourceIds = buildPidList(10);
|
||||
List<ResourcePersistentId> resourceIds = buildPidList(10);
|
||||
|
||||
Consumer<List<Long>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
Consumer<List<ResourcePersistentId>> partitionConsumer = buildPartitionConsumer(myLatch);
|
||||
myLatch.setExpectedCount(2);
|
||||
getPartitionRunner(5, 1).runInPartitionedThreads(resourceIds, partitionConsumer);
|
||||
List<HookParams> calls = myLatch.awaitExpected();
|
||||
|
@ -145,7 +146,7 @@ public class PartitionRunnerTest {
|
|||
}
|
||||
}
|
||||
|
||||
private Consumer<List<Long>> buildPartitionConsumer(PointcutLatch latch) {
|
||||
private Consumer<List<ResourcePersistentId>> buildPartitionConsumer(PointcutLatch latch) {
|
||||
return list -> latch.call(new PartitionCall(Thread.currentThread().getName(), list.size()));
|
||||
}
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
|
||||
import ca.uhn.fhir.rest.server.interceptor.IServerOperationInterceptor;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
@ -32,6 +33,7 @@ import static org.hamcrest.Matchers.containsString;
|
|||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.doAnswer;
|
||||
import static org.mockito.Mockito.inOrder;
|
||||
|
@ -511,6 +513,7 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
|
|||
|
||||
Patient p = new Patient();
|
||||
p.setActive(false);
|
||||
|
||||
IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
p = myPatientDao.read(id);
|
||||
|
@ -518,6 +521,37 @@ public class FhirResourceDaoR4InterceptorTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPrestorageClientAssignedIdInterceptorCanDenyClientAssignedIds() {
|
||||
Object interceptor = new Object() {
|
||||
@Hook(Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID)
|
||||
public void prestorageClientAssignedId(IBaseResource theResource, RequestDetails theRequest) {
|
||||
throw new ForbiddenOperationException("Client assigned id rejected.");
|
||||
}
|
||||
};
|
||||
mySrdInterceptorService.registerInterceptor(interceptor);
|
||||
|
||||
{//Ensure interceptor is not invoked on create.
|
||||
Patient serverAssignedPatient = new Patient();
|
||||
try {
|
||||
myPatientDao.create(serverAssignedPatient, mySrd);
|
||||
} catch (ForbiddenOperationException e) {
|
||||
fail("Interceptor was invoked, and should not have been!");
|
||||
}
|
||||
}
|
||||
|
||||
{//Ensure attempting to set a client assigned id is rejected by our interceptor.
|
||||
try {
|
||||
Patient clientAssignedPatient = new Patient();
|
||||
clientAssignedPatient.setId("Patient/custom-id");
|
||||
myPatientDao.update(clientAssignedPatient, mySrd);
|
||||
fail();
|
||||
} catch (ForbiddenOperationException e) {
|
||||
assertEquals("Client assigned id rejected.", e.getMessage());
|
||||
}
|
||||
}
|
||||
}
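/**
 * A minimal sketch (not part of the change above) of how the new STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID
 * pointcut could be used to allow some client-assigned ids and reject others. The ALLOWED_ID_PREFIX
 * constant and the prefix rule are hypothetical; the hook parameters (IBaseResource and RequestDetails,
 * returning void) follow the pointcut definition introduced by this change.
 */
public static class SelectiveClientAssignedIdInterceptor {
private static final String ALLOWED_ID_PREFIX = "example-"; // hypothetical naming rule
@Hook(Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID)
public void allowOnlyPrefixedIds(IBaseResource theResource, RequestDetails theRequestDetails) {
String idPart = theResource.getIdElement().getIdPart();
if (idPart != null && !idPart.startsWith(ALLOWED_ID_PREFIX)) {
// Rejecting here prevents the client-assigned id from ever reaching permanent storage
throw new ForbiddenOperationException("Client assigned id rejected: " + idPart);
}
}
}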
|
||||
|
||||
/**
|
||||
* Make sure that both JPA interceptors and RestfulServer interceptors can
|
||||
* get called
|
||||
|
|
|
@ -1157,7 +1157,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
|
|||
myPatientDao.search(params);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals(Msg.code(1210) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
assertEquals(Msg.code(1209) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -639,7 +639,7 @@ public class FhirResourceDaoR4SearchNoHashesTest extends BaseJpaR4Test {
|
|||
myPatientDao.search(params);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals(Msg.code(1210) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
assertEquals(Msg.code(1209) + "Unknown parameter name: Observation:soooooobject", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -92,6 +92,7 @@ import static ca.uhn.fhir.jpa.model.util.UcumServiceUtil.UCUM_CODESYSTEM_URL;
|
|||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
|
@ -1184,6 +1185,78 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMultipleComponentsHandlesAndOr() {
|
||||
Observation obs1 = getObservation();
|
||||
addComponentWithCodeAndQuantity(obs1, "8480-6", 107);
|
||||
addComponentWithCodeAndQuantity(obs1, "8462-4", 60);
|
||||
|
||||
IIdType obs1Id = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
Observation obs2 = getObservation();
|
||||
addComponentWithCodeAndQuantity(obs2, "8480-6",307);
|
||||
addComponentWithCodeAndQuantity(obs2, "8462-4",260);
|
||||
|
||||
myObservationDao.create(obs2, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
// andClauses
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=60";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 107 and 60", resourceIds, hasItem(equalTo(obs1Id.getIdPart())));
|
||||
}
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=260";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 107 and 260", resourceIds, empty());
|
||||
}
|
||||
|
||||
//andAndOrClauses
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=107&component-value-quantity=gt50,lt70";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 107 and gt50,lt70", resourceIds, hasItem(equalTo(obs1Id.getIdPart())));
|
||||
}
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=50,70&component-value-quantity=260";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 50,70 and 260", resourceIds, empty());
|
||||
}
|
||||
|
||||
// multipleAndsWithMultipleOrsEach
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=50,60&component-value-quantity=105,107";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 50,60 and 105,107", resourceIds, hasItem(equalTo(obs1Id.getIdPart())));
|
||||
}
|
||||
{
|
||||
String theUrl = "/Observation?component-value-quantity=50,60&component-value-quantity=250,260";
|
||||
List<String> resourceIds = myTestDaoSearch.searchForIds(theUrl);
|
||||
assertThat("when same component with qtys 50,60 and 250,260", resourceIds, empty());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private Observation getObservation() {
|
||||
Observation obs = new Observation();
|
||||
obs.getCode().addCoding().setCode("85354-9").setSystem("http://loinc.org");
|
||||
obs.setStatus(Observation.ObservationStatus.FINAL);
|
||||
return obs;
|
||||
}
|
||||
|
||||
private Quantity getQuantity(double theValue) {
|
||||
return new Quantity().setValue(theValue).setUnit("mmHg").setSystem("http://unitsofmeasure.org").setCode("mm[Hg]");
|
||||
}
|
||||
|
||||
private Observation.ObservationComponentComponent addComponentWithCodeAndQuantity(Observation theObservation, String theConceptCode, double theQuantityValue) {
|
||||
Observation.ObservationComponentComponent comp = theObservation.addComponent();
|
||||
CodeableConcept cc1_1 = new CodeableConcept();
|
||||
cc1_1.addCoding().setCode(theConceptCode).setSystem("http://loinc.org");
|
||||
comp.setCode(cc1_1);
|
||||
comp.setValue(getQuantity(theQuantityValue));
|
||||
return comp;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,601 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
|
||||
import ca.uhn.fhir.jpa.dao.TestDaoSearch;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.test.config.TestHibernateSearchAddInConfig;
|
||||
import ca.uhn.fhir.jpa.test.config.TestR4Config;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
|
||||
import ca.uhn.fhir.rest.param.QuantityParam;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.storage.test.DaoTestDataBuilder;
|
||||
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
|
||||
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
|
||||
import org.hibernate.search.engine.search.predicate.dsl.MatchPredicateOptionsStep;
|
||||
import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
|
||||
import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;
|
||||
import org.hibernate.search.engine.search.query.SearchResult;
|
||||
import org.hibernate.search.mapper.orm.Search;
|
||||
import org.hibernate.search.mapper.orm.session.SearchSession;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.Meta;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Quantity;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter.NESTED_SEARCH_PARAM_ROOT;
|
||||
import static ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter.QTY_CODE;
|
||||
import static ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter.QTY_PARAM_NAME;
|
||||
import static ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter.QTY_SYSTEM;
|
||||
import static ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter.QTY_VALUE;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/**
|
||||
* Just a sandbox. Never intended to be run by CI pipelines.
|
||||
*/
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@RequiresDocker
|
||||
@ContextConfiguration(classes = {
|
||||
TestR4Config.class,
|
||||
TestHibernateSearchAddInConfig.Elasticsearch.class,
|
||||
DaoTestDataBuilder.Config.class,
|
||||
TestDaoSearch.Config.class
|
||||
})
|
||||
@Disabled
|
||||
public class HibernateSearchSandboxTest extends BaseJpaTest {
|
||||
|
||||
@Autowired
|
||||
private EntityManager myEntityManager;
|
||||
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTxManager;
|
||||
|
||||
@Autowired
|
||||
private ITestDataBuilder myTestDataBuilder;
|
||||
|
||||
@Autowired
|
||||
private IResourceReindexingSvc myResourceReindexingSvc;
|
||||
|
||||
@Autowired
|
||||
@Qualifier("mySystemDaoR4")
|
||||
private IFhirSystemDao<Bundle, Meta> mySystemDao;
|
||||
|
||||
@Autowired
|
||||
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
|
||||
|
||||
@Autowired
|
||||
protected ISearchParamRegistry mySearchParamRegistry;
|
||||
|
||||
@Autowired
|
||||
private IBulkDataExportJobSchedulingHelper myBulkDataScheduleHelper;
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myObservationDaoR4")
|
||||
private IFhirResourceDao<Observation> myObservationDao;
|
||||
|
||||
// @BeforeEach
|
||||
// public void beforePurgeDatabase() {
|
||||
// purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataScheduleHelper);
|
||||
// }
|
||||
|
||||
@BeforeEach
|
||||
public void enableContainsAndLucene() {
|
||||
myDaoConfig.setAllowContainsSearches(true);
|
||||
myDaoConfig.setAdvancedLuceneIndexing(true);
|
||||
}
|
||||
|
||||
|
||||
@Nested
|
||||
public class NotNestedObjectQueries {
|
||||
/**
|
||||
* Shows that when there is only one "and" clause with "or" entries, we can add the "should" clauses
|
||||
* at the top level
|
||||
*/
|
||||
@Test
|
||||
public void searchModelingMultipleAndWithOneOringClauseTest() {
|
||||
String system = "http://loinc.org";
|
||||
Observation obs1 = new Observation();
|
||||
obs1.getCode().setText("Systolic Blood Pressure");
|
||||
obs1.getCode().addCoding().setCode("obs1").setSystem(system).setDisplay("Systolic Blood Pressure");
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setValue(new Quantity(123));
|
||||
obs1.getNoteFirstRep().setText("obs1");
|
||||
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> f.bool(b -> {
|
||||
b.must(f.match().field("myResourceType").matching("Observation"));
|
||||
b.must(f.match().field("sp.code.token.system").matching("http://loinc.org"));
|
||||
b.should(f.match().field("sp.code.token.code").matching("obs3"));
|
||||
b.should(f.match().field("sp.code.token.code").matching("obs1"));
|
||||
b.minimumShouldMatchNumber(1);
|
||||
}))
|
||||
.fetchAll();
|
||||
long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Shows that when there are multiple "and" clauses with "or" entries, we need to group each one in a "must" clause
|
||||
* so that a minimumShouldMatchNumber(1) can be added to each group
|
||||
*/
|
||||
@Test
|
||||
public void searchModelingMultipleAndWithMultipleOrClausesTest() {
|
||||
String system = "http://loinc.org";
|
||||
Observation obs1 = new Observation();
|
||||
obs1.getCode().setText("Systolic Blood Pressure");
|
||||
obs1.getCode().addCoding().setCode("obs1").setSystem(system).setDisplay("Systolic Blood Pressure");
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setValue(new Quantity(123));
|
||||
obs1.getNoteFirstRep().setText("obs1");
|
||||
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> f.bool(b -> {
|
||||
b.must(f.match().field("myResourceType").matching("Observation"));
|
||||
b.must(f.match().field("sp.code.token.system").matching("http://loinc.org"));
|
||||
|
||||
b.must(f.bool(p -> {
|
||||
p.should(f.match().field("sp.code.token.code").matching("obs3"));
|
||||
p.should(f.match().field("sp.code.token.code").matching("obs1"));
|
||||
p.minimumShouldMatchNumber(1);
|
||||
}));
|
||||
|
||||
b.must(f.bool(p -> {
|
||||
p.should(f.match().field("sp.code.token.code").matching("obs5"));
|
||||
p.should(f.match().field("sp.code.token.code").matching("obs1"));
|
||||
p.minimumShouldMatchNumber(1);
|
||||
}));
|
||||
}))
|
||||
.fetchAll();
|
||||
long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class NestedObjectQueries {
|
||||
|
||||
/**
|
||||
* Shows that when there is only one "and" clause with "or" entries, we can add the "should" clauses
|
||||
* at the top level
|
||||
*/
|
||||
@Test
|
||||
public void searchModelingAndMultipleAndWithOneOringClauseTest() {
|
||||
IIdType myResourceId = myTestDataBuilder.createObservation(myTestDataBuilder.withElementAt("valueQuantity",
|
||||
myTestDataBuilder.withPrimitiveAttribute("value", 0.6)
|
||||
// myTestDataBuilder.withPrimitiveAttribute("system", UCUM_CODESYSTEM_URL),
|
||||
// myTestDataBuilder.withPrimitiveAttribute("code", "mm[Hg]")
|
||||
));
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> f.bool(b -> {
|
||||
b.must(f.match().field("myResourceType").matching("Observation"));
|
||||
b.must(f.nested().objectField("nsp.value-quantity")
|
||||
.nest(f.bool()
|
||||
.must(f.range().field("nsp.value-quantity.quantity.value").lessThan(0.7))
|
||||
.should(f.range().field("nsp.value-quantity.quantity.value").between(0.475, 0.525))
|
||||
.should(f.range().field("nsp.value-quantity.quantity.value").between(0.57, 0.63))
|
||||
.minimumShouldMatchNumber(1)
|
||||
));
|
||||
}))
|
||||
.fetchAll();
|
||||
// long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Shows that when there are multiple "and" clauses with "or" entries, we need to group each one in a "must" clause
|
||||
* so that a minimumShouldMatchNumber(1) can be added to each group
|
||||
*/
|
||||
@Test
|
||||
public void searchModelingMultipleAndWithMultipleOrClausesTest() {
|
||||
IIdType myResourceId = myTestDataBuilder.createObservation(myTestDataBuilder.withElementAt("valueQuantity",
|
||||
myTestDataBuilder.withPrimitiveAttribute("value", 0.6)
|
||||
// myTestDataBuilder.withPrimitiveAttribute("system", UCUM_CODESYSTEM_URL),
|
||||
// myTestDataBuilder.withPrimitiveAttribute("code", "mm[Hg]")
|
||||
));
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> f.bool(b -> {
|
||||
b.must(f.match().field("myResourceType").matching("Observation"));
|
||||
b.must(f.nested().objectField("nsp.value-quantity")
|
||||
.nest(f.bool()
|
||||
.must(f.range().field("nsp.value-quantity.quantity.value").lessThan(0.7))
|
||||
|
||||
.must(f.bool(p -> {
|
||||
p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.475, 0.525));
|
||||
p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.57, 0.63));
|
||||
p.minimumShouldMatchNumber(1);
|
||||
}))
|
||||
|
||||
.must(f.bool(p -> {
|
||||
p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.2, 0.8));
|
||||
p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.7, 0.9));
|
||||
p.minimumShouldMatchNumber(1);
|
||||
}))
|
||||
|
||||
.minimumShouldMatchNumber(1)
|
||||
));
|
||||
}))
|
||||
.fetchAll();
|
||||
// long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
// runInTransaction(() -> {
|
||||
// SearchSession searchSession = Search.session(myEntityManager);
|
||||
// SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
// .where(f -> f.bool(b -> {
|
||||
// b.must(f.match().field("myResourceType").matching("Observation"));
|
||||
// b.must(f.bool()
|
||||
// .must(f.range().field("nsp.value-quantity.quantity.value").lessThan(0.7))
|
||||
//
|
||||
// .must(f.bool(p -> {
|
||||
// p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.475, 0.525));
|
||||
// p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.57, 0.63));
|
||||
// p.minimumShouldMatchNumber(1);
|
||||
// }))
|
||||
//
|
||||
// .must(f.bool(p -> {
|
||||
// p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.2, 0.8));
|
||||
// p.should(f.range().field("nsp.value-quantity.quantity.value").between(0.7, 0.9));
|
||||
// p.minimumShouldMatchNumber(1);
|
||||
// }))
|
||||
//
|
||||
// .minimumShouldMatchNumber(1)
|
||||
// );
|
||||
// }))
|
||||
// .fetchAll();
|
||||
//// long totalHitCount = result.total().hitCount();
|
||||
//// List<ResourceTable> hits = result.hits();
|
||||
// });
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The following code is the beginning of refactoring the queries into a cleaner structure, which means
|
||||
* trying to achieve the clean query structure modeled by the previous tests, but using generic methods
|
||||
*/
|
||||
@Nested
|
||||
public class FragmentedCodeNotNested {
|
||||
|
||||
private SearchPredicateFactory fact;
|
||||
|
||||
|
||||
@Test
|
||||
public void searchModelingMultipleAndOneOrClauseTest() {
|
||||
String system = "http://loinc.org";
|
||||
Observation obs1 = new Observation();
|
||||
obs1.getCode().setText("Systolic Blood Pressure");
|
||||
obs1.getCode().addCoding().setCode("obs1").setSystem(system).setDisplay("Systolic Blood Pressure");
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setValue(new Quantity(123));
|
||||
obs1.getNoteFirstRep().setText("obs1");
|
||||
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
String paramName = "value-quantity";
|
||||
List<List<IQueryParameterType>> theQuantityAndOrTerms = Lists.newArrayList();
|
||||
|
||||
theQuantityAndOrTerms.add(Collections.singletonList(
|
||||
new QuantityParam().setValue(0.7)));
|
||||
|
||||
theQuantityAndOrTerms.add(Lists.newArrayList(
|
||||
new QuantityParam().setValue(0.5),
|
||||
new QuantityParam().setValue(0.6)
|
||||
));
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> {
|
||||
TestPredBuilder builder = new TestPredBuilder(f);
|
||||
return builder.buildAndOrPredicates(paramName, theQuantityAndOrTerms);
|
||||
})
|
||||
.fetchAll();
|
||||
long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void searchModelingMultipleAndMultipleOrClauseTest() {
|
||||
String system = "http://loinc.org";
|
||||
Observation obs1 = new Observation();
|
||||
obs1.getCode().setText("Systolic Blood Pressure");
|
||||
obs1.getCode().addCoding().setCode("obs1").setSystem(system).setDisplay("Systolic Blood Pressure");
|
||||
obs1.setStatus(Observation.ObservationStatus.FINAL);
|
||||
obs1.setValue(new Quantity(123));
|
||||
obs1.getNoteFirstRep().setText("obs1");
|
||||
IIdType id1 = myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
String paramName = "value-quantity";
|
||||
List<List<IQueryParameterType>> theQuantityAndOrTerms = Lists.newArrayList();
|
||||
|
||||
theQuantityAndOrTerms.add(Collections.singletonList(
|
||||
new QuantityParam().setValue(0.7)));
|
||||
|
||||
theQuantityAndOrTerms.add(Lists.newArrayList(
|
||||
new QuantityParam().setValue(0.5),
|
||||
new QuantityParam().setValue(0.6)
|
||||
));
|
||||
|
||||
theQuantityAndOrTerms.add(Lists.newArrayList(
|
||||
new QuantityParam().setValue(0.9),
|
||||
new QuantityParam().setValue(0.6)
|
||||
));
|
||||
|
||||
runInTransaction(() -> {
|
||||
SearchSession searchSession = Search.session(myEntityManager);
|
||||
SearchResult<ResourceTable> result = searchSession.search(ResourceTable.class)
|
||||
.where(f -> {
|
||||
TestPredBuilder builder = new TestPredBuilder(f);
|
||||
return builder.buildAndOrPredicates(paramName, theQuantityAndOrTerms);
|
||||
})
|
||||
.fetchAll();
|
||||
long totalHitCount = result.total().hitCount();
|
||||
// List<ResourceTable> hits = result.hits();
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private static class TestPredBuilder {
|
||||
|
||||
private static final double QTY_APPROX_TOLERANCE_PERCENT = .10;
|
||||
private static final double QTY_TOLERANCE_PERCENT = .05;
|
||||
|
||||
SearchPredicateFactory myPredicateFactory;
|
||||
|
||||
public TestPredBuilder(SearchPredicateFactory theF) { myPredicateFactory = theF; }
|
||||
|
||||
|
||||
public PredicateFinalStep buildAndOrPredicates(
|
||||
String theSearchParamName, List<List<IQueryParameterType>> theAndOrTerms) {
|
||||
|
||||
boolean isNested = isNested(theSearchParamName);
|
||||
|
||||
// we need to know if there is more than one "and" predicate (outer list) with more than one "or" predicate (inner list)
|
||||
long maxOrPredicateSize = theAndOrTerms.stream().map(List::size).filter(s -> s > 1).count();
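// e.g. for value-quantity=0.7&value-quantity=0.5,0.6 only the second inner list has more than one entry,
// so maxOrPredicateSize is 1 and the "or" terms can stay at the top level (illustrative example only)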
|
||||
|
||||
BooleanPredicateClausesStep<?> topBool = myPredicateFactory.bool();
|
||||
topBool.must(myPredicateFactory.match().field("myResourceType").matching("Observation"));
|
||||
|
||||
BooleanPredicateClausesStep<?> activeBool = topBool;
|
||||
if (isNested) {
|
||||
BooleanPredicateClausesStep<?> nestedBool = myPredicateFactory.bool();
|
||||
activeBool = nestedBool;
|
||||
}
|
||||
|
||||
for (List<IQueryParameterType> andTerm : theAndOrTerms) {
|
||||
if (andTerm.size() == 1) {
|
||||
// buildSinglePredicate
|
||||
// activeBool.must(myPredicateFactory.match().field("nsp.value-quantity.quantity.value").matching(0.7));
|
||||
addOnePredicate(activeBool, true, theSearchParamName, andTerm.get(0));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (maxOrPredicateSize <= 1) {
|
||||
// this is the only list of or predicates with more than 1 entry so
|
||||
// no need to separate it in a group. Can be part of main and clauses
|
||||
for (IQueryParameterType orTerm : andTerm) {
|
||||
addOnePredicate(activeBool, false, theSearchParamName, orTerm);
|
||||
}
|
||||
activeBool.minimumShouldMatchNumber(1);
|
||||
|
||||
} else {
|
||||
// this is not the only list of or predicates with more than 1 entry
|
||||
// so all of them need to be separated in groups with a minimumShouldMatchNumber(1)
|
||||
activeBool.must(myPredicateFactory.bool(p -> {
|
||||
for (IQueryParameterType orTerm : andTerm) {
|
||||
addOnePredicate(p, false, theSearchParamName, orTerm);
|
||||
}
|
||||
p.minimumShouldMatchNumber(1);
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
if (isNested) {
|
||||
topBool.must(myPredicateFactory.nested().objectField("nsp.value-quantity").nest(activeBool));
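// the "nsp.value-quantity" path is hard-coded here because isNested() below currently only returns true for value-quantity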
|
||||
}
|
||||
return topBool;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private boolean isNested(String theSearchParamName) {
|
||||
if (theSearchParamName.equals("value-quantity")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
private void addOnePredicate(BooleanPredicateClausesStep<?> theTopBool, boolean theIsMust,
|
||||
String theParamName, IQueryParameterType theParameterType) {
|
||||
|
||||
if (theParameterType instanceof QuantityParam) {
|
||||
addQuantityOrClauses(theTopBool, theIsMust, theParamName, theParameterType);
|
||||
return;
|
||||
}
|
||||
|
||||
throw new IllegalStateException("Shouldn't reach this code");
|
||||
}
|
||||
|
||||
|
||||
private void addQuantityOrClauses(BooleanPredicateClausesStep<?> theTopBool, boolean theIsMust,
|
||||
String theSearchParamName, IQueryParameterType theParamType) {
|
||||
|
||||
String fieldPath = NESTED_SEARCH_PARAM_ROOT + "." + theSearchParamName + "." + QTY_PARAM_NAME;
|
||||
|
||||
QuantityParam qtyParam = QuantityParam.toQuantityParam(theParamType);
|
||||
ParamPrefixEnum activePrefix = qtyParam.getPrefix() == null ? ParamPrefixEnum.EQUAL : qtyParam.getPrefix();
|
||||
|
||||
// if (myModelConfig.getNormalizedQuantitySearchLevel() == NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED) {
|
||||
// QuantityParam canonicalQty = UcumServiceUtil.toCanonicalQuantityOrNull(qtyParam);
|
||||
// if (canonicalQty != null) {
|
||||
// String valueFieldPath = fieldPath + "." + QTY_VALUE_NORM;
|
||||
// setPrefixedQuantityPredicate(orQuantityTerms, activePrefix, canonicalQty, valueFieldPath);
|
||||
// orQuantityTerms.must(myPredicateFactory.match()
|
||||
// .field(fieldPath + "." + QTY_CODE_NORM)
|
||||
// .matching(canonicalQty.getUnits()));
|
||||
// return orQuantityTerms;
|
||||
// }
|
||||
// }
|
||||
|
||||
// not NORMALIZED_QUANTITY_SEARCH_SUPPORTED or non-canonicalizable parameter
|
||||
addQuantityTerms(theTopBool, theIsMust, activePrefix, qtyParam, fieldPath);
|
||||
}
|
||||
|
||||
|
||||
private void addQuantityTerms(BooleanPredicateClausesStep<?> theTopBool, boolean theIsMust,
|
||||
ParamPrefixEnum theActivePrefix, QuantityParam theQtyParam, String theFieldPath) {
|
||||
|
||||
String valueFieldPath = theFieldPath + "." + QTY_VALUE;
|
||||
PredicateFinalStep rangePred = getPrefixedRangePredicate(theActivePrefix, theQtyParam, valueFieldPath);
|
||||
addMustOrShould(theIsMust, theTopBool, rangePred);
|
||||
|
||||
if (isNotBlank(theQtyParam.getSystem())) {
|
||||
addFieldPredicate(theIsMust, theTopBool, theFieldPath + "." + QTY_SYSTEM, theQtyParam.getSystem());
|
||||
}
|
||||
|
||||
if (isNotBlank(theQtyParam.getUnits())) {
|
||||
addFieldPredicate(theIsMust, theTopBool, theFieldPath + "." + QTY_CODE, theQtyParam.getUnits());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void addFieldPredicate(boolean theIsMust, BooleanPredicateClausesStep<?> theTopBool, String theFieldPath, String theValue) {
|
||||
MatchPredicateOptionsStep<?> pred = myPredicateFactory.match().field(theFieldPath).matching(theValue);
|
||||
addMustOrShould(theIsMust, theTopBool, pred);
|
||||
}
|
||||
|
||||
private void addMustOrShould(boolean theIsMust, BooleanPredicateClausesStep<?> theTopBool, PredicateFinalStep thePredicate) {
|
||||
if (theIsMust) {
|
||||
theTopBool.must(thePredicate);
|
||||
} else {
|
||||
theTopBool.should(thePredicate);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private PredicateFinalStep getPrefixedRangePredicate(
|
||||
ParamPrefixEnum thePrefix, QuantityParam theQuantity, String valueFieldPath) {
|
||||
|
||||
double value = theQuantity.getValue().doubleValue();
|
||||
double approxTolerance = value * QTY_APPROX_TOLERANCE_PERCENT;
|
||||
double defaultTolerance = value * QTY_TOLERANCE_PERCENT;
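// e.g. a search value of 100 gives approxTolerance 10 and defaultTolerance 5,
// so "ap" matches [90, 110] and "eq" matches [95, 105] (illustrative values only)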
|
||||
|
||||
switch (thePrefix) {
|
||||
// searches for resource quantity between passed param value +/- 10%
|
||||
case APPROXIMATE:
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.between(value - approxTolerance, value + approxTolerance);
|
||||
|
||||
// searches for resource quantity between passed param value +/- 5%
|
||||
case EQUAL:
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.between(value - defaultTolerance, value + defaultTolerance);
|
||||
|
||||
// searches for resource quantity > param value
|
||||
case GREATERTHAN:
|
||||
case STARTS_AFTER: // treated as GREATERTHAN because search doesn't handle ranges
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.greaterThan(value);
|
||||
|
||||
// searches for resource quantity not < param value
|
||||
case GREATERTHAN_OR_EQUALS:
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.atLeast(value);
|
||||
|
||||
// searches for resource quantity < param value
|
||||
case LESSTHAN:
|
||||
case ENDS_BEFORE: // treated as LESSTHAN because search doesn't handle ranges
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.lessThan(value);
|
||||
|
||||
// searches for resource quantity not > param value
|
||||
case LESSTHAN_OR_EQUALS:
|
||||
return myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.atMost(value);
|
||||
|
||||
// NOT_EQUAL: searches for resource quantity not between passed param value +/- 5%
|
||||
case NOT_EQUAL:
|
||||
return myPredicateFactory.bool(b -> {
|
||||
b.should(myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.between(null, value - defaultTolerance));
|
||||
b.should(myPredicateFactory.range()
|
||||
.field(valueFieldPath)
|
||||
.between(value + defaultTolerance, null));
|
||||
b.minimumShouldMatchNumber(1);
|
||||
});
|
||||
}
|
||||
throw new IllegalStateException("Should not reach here");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected FhirContext getFhirContext() {
|
||||
return myFhirContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PlatformTransactionManager getTxManager() {
|
||||
return myTxManager;
|
||||
}
|
||||
}
|
|
@ -6,6 +6,10 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.test.config.TestR4Config;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import org.apache.http.NameValuePair;
|
||||
import org.apache.http.client.utils.URLEncodedUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.Appointment;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
|
@ -18,22 +22,21 @@ import org.junit.jupiter.api.AfterEach;
|
|||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices.SEARCH_ID_PARAM;
|
||||
import static ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices.SEARCH_OFFSET_PARAM;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
@ -56,16 +59,16 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
myDaoConfig.setFilterParameterEnabled(true);
|
||||
}
|
||||
|
||||
private String createSomeAppointment() {
|
||||
CodeableConcept someCodeableConcept = new CodeableConcept(new Coding("TEST_SYSTEM", "TEST_CODE", "TEST_DISPLAY"));
|
||||
private void createSomeAppointmentWithType(String id, CodeableConcept type) {
|
||||
Appointment someAppointment = new Appointment();
|
||||
someAppointment.setAppointmentType(someCodeableConcept);
|
||||
return myAppointmentDao.create(someAppointment).getId().getIdPart();
|
||||
someAppointment.setId(id);
|
||||
someAppointment.setAppointmentType(type);
|
||||
myAppointmentDao.update(someAppointment);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testListResourcesGraphqlArgumentConversion() {
|
||||
String appointmentId = createSomeAppointment();
|
||||
createSomeAppointmentWithType("hapi-1", new CodeableConcept(new Coding("TEST_SYSTEM", "TEST_CODE", "TEST_DISPLAY")));
|
||||
|
||||
Argument argument = new Argument("appointment_type", new StringValue("TEST_CODE"));
|
||||
|
||||
|
@ -73,12 +76,12 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
mySvc.listResources(mySrd, "Appointment", Collections.singletonList(argument), result);
|
||||
|
||||
assertFalse(result.isEmpty());
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals(appointmentId)));
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals("hapi-1")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testListResourceGraphqlFilterArgument() {
|
||||
String appointmentId = createSomeAppointment();
|
||||
createSomeAppointmentWithType("hapi-1", new CodeableConcept(new Coding("TEST_SYSTEM", "TEST_CODE", "TEST_DISPLAY")));
|
||||
|
||||
Argument argument = new Argument("_filter", new StringValue("appointment-type eq TEST_CODE"));
|
||||
|
||||
|
@ -86,12 +89,12 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
mySvc.listResources(mySrd, "Appointment", Collections.singletonList(argument), result);
|
||||
|
||||
assertFalse(result.isEmpty());
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals(appointmentId)));
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals("hapi-1")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testListResourceGraphqlTokenArgumentWithSystem() {
|
||||
String appointmentId = createSomeAppointment();
|
||||
createSomeAppointmentWithType("hapi-1", new CodeableConcept(new Coding("TEST_SYSTEM", "TEST_CODE", "TEST_DISPLAY")));;
|
||||
|
||||
Argument argument = new Argument("appointment_type", new StringValue("TEST_SYSTEM|TEST_CODE"));
|
||||
|
||||
|
@ -99,7 +102,7 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
mySvc.listResources(mySrd, "Appointment", Collections.singletonList(argument), result);
|
||||
|
||||
assertFalse(result.isEmpty());
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals(appointmentId)));
|
||||
assertTrue(result.stream().anyMatch((it) -> it.getIdElement().getIdPart().equals("hapi-1")));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -109,9 +112,9 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
List<IBaseResource> result = new ArrayList<>();
|
||||
try {
|
||||
mySvc.listResources(mySrd, "Appointment", Collections.singletonList(argument), result);
|
||||
fail();
|
||||
fail("InvalidRequestException should be thrown.");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertEquals(Msg.code(1275) + "Unknown GraphQL argument \"test\". Value GraphQL argument for this type are: [_content, _id, _lastUpdated, _profile, _security, _source, _tag, _text, actor, appointment_type, based_on, date, identifier, location, part_status, patient, practitioner, reason_code, reason_reference, service_category, service_type, slot, specialty, status, supporting_info]", e.getMessage());
|
||||
assertTrue(e.getMessage().contains("Unknown GraphQL argument \"test\"."));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -176,4 +179,64 @@ public class DaoRegistryGraphQLStorageServicesTest extends BaseJpaR4Test {
|
|||
List<String> expectedId2 = Arrays.asList("hapi-5", "hapi-6", "hapi-7", "hapi-8", "hapi-9");
|
||||
assertTrue(result2.stream().allMatch((it) -> expectedId2.contains(it.getIdElement().getIdPart())));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearch() {
|
||||
createSomePatientWithId("hapi-1");
|
||||
|
||||
List<Argument> arguments = Collections.emptyList();
|
||||
IBaseBundle bundle = mySvc.search(mySrd, "Patient", arguments);
|
||||
|
||||
List<String> result = toUnqualifiedVersionlessIdValues(bundle);
|
||||
assertEquals(1, result.size());
|
||||
assertEquals("Patient/hapi-1", result.get(0));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearchNextPage() throws URISyntaxException {
|
||||
createSomePatientWithId("hapi-1");
|
||||
createSomePatientWithId("hapi-2");
|
||||
createSomePatientWithId("hapi-3");
|
||||
|
||||
List<Argument> arguments = Collections.singletonList(new Argument("_count", new StringValue("1")));
|
||||
IBaseBundle bundle = mySvc.search(mySrd, "Patient", arguments);
|
||||
|
||||
Optional<String> nextUrl = Optional.ofNullable(BundleUtil.getLinkUrlOfType(myFhirContext, bundle, "next"));
|
||||
assertTrue(nextUrl.isPresent());
|
||||
|
||||
List<NameValuePair> params = URLEncodedUtils.parse(new URI(nextUrl.get()), StandardCharsets.UTF_8);
|
||||
Optional<String> cursorId = params.stream()
|
||||
.filter(it -> SEARCH_ID_PARAM.equals(it.getName()))
|
||||
.map(NameValuePair::getValue)
|
||||
.findAny();
|
||||
Optional<String> cursorOffset = params.stream()
|
||||
.filter(it -> SEARCH_OFFSET_PARAM.equals(it.getName()))
|
||||
.map(NameValuePair::getValue)
|
||||
.findAny();
|
||||
|
||||
assertTrue(cursorId.isPresent());
|
||||
assertTrue(cursorOffset.isPresent());
|
||||
|
||||
List<Argument> nextArguments = Arrays.asList(
|
||||
new Argument(SEARCH_ID_PARAM, new StringValue(cursorId.get())),
|
||||
new Argument(SEARCH_OFFSET_PARAM, new StringValue(cursorOffset.get()))
|
||||
);
|
||||
|
||||
Optional<IBaseBundle> nextBundle = Optional.ofNullable(mySvc.search(mySrd, "Patient", nextArguments));
|
||||
assertTrue(nextBundle.isPresent());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSearchInvalidCursor() {
|
||||
try {
|
||||
List<Argument> arguments = Arrays.asList(
|
||||
new Argument(SEARCH_ID_PARAM, new StringValue("invalid-search-id")),
|
||||
new Argument(SEARCH_OFFSET_PARAM, new StringValue("0"))
|
||||
);
|
||||
mySvc.search(mySrd, "Patient", arguments);
|
||||
fail("InvalidRequestException should be thrown.");
|
||||
} catch (InvalidRequestException e) {
|
||||
assertTrue(e.getMessage().contains("GraphQL Cursor \"invalid-search-id\" does not exist and may have expired"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,8 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.is;
|
||||
import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.blankOrNullString;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -47,6 +49,7 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
super.before();
|
||||
myStorageSvc.setMinimumBinarySize(10);
|
||||
myDaoConfig.setExpungeEnabled(true);
|
||||
|
||||
myInterceptorRegistry.registerInterceptor(myBinaryStorageInterceptor);
|
||||
}
|
||||
|
||||
|
@ -56,7 +59,8 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
super.after();
|
||||
myStorageSvc.setMinimumBinarySize(0);
|
||||
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
|
||||
myBinaryStorageInterceptor.setAutoDeExternalizeMaximumBytes(new BinaryStorageInterceptor().getAutoDeExternalizeMaximumBytes());
|
||||
myBinaryStorageInterceptor.setAutoInflateBinariesMaximumSize(new BinaryStorageInterceptor().getAutoInflateBinariesMaximumSize());
|
||||
myBinaryStorageInterceptor.setAllowAutoInflateBinaries(new BinaryStorageInterceptor().isAllowAutoInflateBinaries());
|
||||
|
||||
MemoryBinaryStorageSvcImpl binaryStorageSvc = (MemoryBinaryStorageSvcImpl) myBinaryStorageSvc;
|
||||
binaryStorageSvc.clear();
|
||||
|
@ -109,6 +113,29 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateAndRetrieveBinary_ServerAssignedId_ExternalizedBinary_DoNotRehydrate() {
|
||||
myBinaryStorageInterceptor.setAllowAutoInflateBinaries(false);
|
||||
|
||||
// Create a resource with a big enough binary
|
||||
Binary binary = new Binary();
|
||||
binary.setContentType("application/octet-stream");
|
||||
binary.setData(SOME_BYTES);
|
||||
DaoMethodOutcome outcome = myBinaryDao.create(binary);
|
||||
|
||||
// Make sure it was externalized
|
||||
IIdType id = outcome.getId().toUnqualifiedVersionless();
|
||||
String encoded = myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getResource());
|
||||
ourLog.info("Encoded: {}", encoded);
|
||||
assertThat(encoded, containsString(HapiExtensions.EXT_EXTERNALIZED_BINARY_ID));
|
||||
assertThat(encoded, not(containsString("\"data\"")));
|
||||
|
||||
// Now read it back and make sure it was not de-externalized
|
||||
Binary output = myBinaryDao.read(id, mySrd);
|
||||
assertEquals("application/octet-stream", output.getContentType());
|
||||
assertArrayEquals(null, output.getData());
|
||||
assertThat(output.getDataElement().getExtensionByUrl(HapiExtensions.EXT_EXTERNALIZED_BINARY_ID), is(notNullValue()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateAndRetrieveBinary_ServerAssignedId_NonExternalizedBinary() {
|
||||
|
@ -294,7 +321,7 @@ public class BinaryStorageInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
|
||||
@Test
|
||||
public void testRetrieveBinaryAboveRetrievalThreshold() {
|
||||
myBinaryStorageInterceptor.setAutoDeExternalizeMaximumBytes(5);
|
||||
myBinaryStorageInterceptor.setAutoInflateBinariesMaximumSize(5);
|
||||
|
||||
// Create a resource with a big enough binary
|
||||
Binary binary = new Binary();
|
||||
|
|
|
@ -1,15 +1,27 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
||||
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
|
||||
import ca.uhn.fhir.jpa.entity.PartitionEntity;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
|
@ -18,17 +30,23 @@ import org.hl7.fhir.r4.model.Condition;
|
|||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Date;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
@ -36,6 +54,11 @@ import static org.junit.jupiter.api.Assertions.fail;
|
|||
@SuppressWarnings("Duplicates")
|
||||
public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Test implements ITestDataBuilder {
|
||||
|
||||
@Autowired
|
||||
private IBulkDataExportSvc myBulkDataExportSvc;
|
||||
@Autowired
|
||||
private IBulkDataExportJobSchedulingHelper myBulkDataExportJobSchedulingHelper;
|
||||
|
||||
@Override
|
||||
@AfterEach
|
||||
public void after() throws Exception {
|
||||
|
@ -370,6 +393,56 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
|
|||
assertThat(response.getEntry(), hasSize(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBulkExportForDifferentPartitions() throws IOException {
|
||||
setBulkDataExportProvider();
|
||||
testBulkExport(TENANT_A);
|
||||
testBulkExport(TENANT_B);
|
||||
testBulkExport(JpaConstants.DEFAULT_PARTITION_NAME);
|
||||
}
|
||||
|
||||
private void testBulkExport(String createInPartition) throws IOException {
|
||||
// Create a patient
|
||||
IBaseResource patientA = buildPatient(withActiveTrue());
|
||||
SystemRequestDetails requestDetails = new SystemRequestDetails();
|
||||
requestDetails.setTenantId(createInPartition);
|
||||
myPatientDao.create((Patient) patientA, requestDetails);
|
||||
|
||||
// Create a bulk job
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
|
||||
|
||||
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options, false, requestDetails);
|
||||
assertNotNull(jobDetails.getJobId());
|
||||
|
||||
// Run a scheduled pass to build the export and wait for completion
|
||||
myBulkDataExportJobSchedulingHelper.startSubmittedJobs();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(
|
||||
BatchConstants.BULK_EXPORT_JOB_NAME
|
||||
);
|
||||
|
||||
//perform export-poll-status
|
||||
HttpGet get = new HttpGet(buildExportUrl(createInPartition, jobDetails.getJobId()));
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
|
||||
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
BulkExportResponseJson responseJson = JsonUtil.deserialize(responseString, BulkExportResponseJson.class);
|
||||
assertThat(responseJson.getOutput().get(0).getUrl(), containsString(JpaConstants.DEFAULT_PARTITION_NAME + "/Binary/"));
|
||||
}
|
||||
}
|
||||
|
||||
private void setBulkDataExportProvider() {
|
||||
BulkDataExportProvider provider = new BulkDataExportProvider();
|
||||
provider.setBulkDataExportSvcForUnitTests(myBulkDataExportSvc);
|
||||
provider.setFhirContextForUnitTest(myFhirContext);
|
||||
ourRestServer.registerProvider(provider);
|
||||
}
|
||||
|
||||
private String buildExportUrl(String createInPartition, String jobId) {
|
||||
return myClient.getServerBase() + "/" + createInPartition + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?"
|
||||
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + jobId;
|
||||
}
|
||||
|
||||
private void createConditionWithAllowedUnqualified(IIdType idA) {
|
||||
myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
|
||||
IIdType idB = createResource("Condition", withTenant(TENANT_A), withObservationCode("http://cs", "A"));
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -54,6 +54,7 @@ import java.util.IdentityHashMap;
|
|||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
|
|
@ -22,7 +22,9 @@ package ca.uhn.fhir.rest.server.util;
|
|||
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.context.phonetic.IPhoneticEncoder;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -40,6 +42,7 @@ public interface ISearchParamRegistry {
|
|||
*/
|
||||
RuntimeSearchParam getActiveSearchParam(String theResourceName, String theParamName);
|
||||
|
||||
|
||||
/**
|
||||
* @return Returns all active search params for the given resource
|
||||
*/
|
||||
|
@ -57,7 +60,6 @@ public interface ISearchParamRegistry {
|
|||
default void requestRefresh() {
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* When indexing a HumanName, if a StringEncoder is set in the context, then the "phonetic" search parameter will normalize
|
||||
* the String using this encoder.
|
||||
|
@ -101,4 +103,22 @@ public interface ISearchParamRegistry {
|
|||
@Nullable
|
||||
RuntimeSearchParam getActiveSearchParamByUrl(String theUrl);
|
||||
|
||||
/**
|
||||
* Find a search parameter for a resource type. The resource type itself is checked first, then the top-level `Resource` definition.
|
||||
*
|
||||
* @param theResourceType the resource type.
|
||||
* @param theParamName the search parameter name.
|
||||
*
|
||||
* @return the {@link RuntimeSearchParam} that is found.
|
||||
*/
|
||||
default RuntimeSearchParam getRuntimeSearchParam(String theResourceType, String theParamName) {
|
||||
RuntimeSearchParam availableSearchParamDef = getActiveSearchParam(theResourceType, theParamName);
|
||||
if (availableSearchParamDef == null) {
|
||||
availableSearchParamDef = getActiveSearchParam("Resource", theParamName);
|
||||
}
|
||||
if (availableSearchParamDef == null) {
|
||||
throw new InvalidRequestException(Msg.code(1209) + "Unknown parameter name: " + theResourceType + ':' + theParamName);
|
||||
}
|
||||
return availableSearchParamDef;
|
||||
}
|
||||
}
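A minimal usage sketch for the new getRuntimeSearchParam() default method, assuming an ISearchParamRegistry instance is available (for example a Spring-injected bean named mySearchParamRegistry, a hypothetical name):

	// Checks the concrete resource type first, then falls back to the top-level "Resource" definition
	RuntimeSearchParam identifierParam = mySearchParamRegistry.getRuntimeSearchParam("Patient", "identifier");

	// A name unknown on both the type and "Resource" produces an InvalidRequestException with Msg.code(1209)
	try {
		mySearchParamRegistry.getRuntimeSearchParam("Patient", "no-such-param");
	} catch (InvalidRequestException e) {
		// "Unknown parameter name: Patient:no-such-param"
	}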
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.batch2.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server - Batch2 Task Processor
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
|
||||
public interface IJobCompletionHandler<PT extends IModelJson> {
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.0.0-PRE10-SNAPSHOT</version>
|
||||
<version>6.0.0-PRE11-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,11 +4,13 @@ import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
|
|||
import ca.uhn.fhir.jpa.api.model.WarmCacheEntry;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.validation.FhirValidator;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
|
@ -298,6 +300,20 @@ public class DaoConfig {
|
|||
*/
|
||||
private boolean myConcurrentBundleValidation;
|
||||
|
||||
/**
|
||||
* Since 6.0.0
|
||||
*/
|
||||
private boolean myAllowAutoInflateBinaries = true;
|
||||
/**
|
||||
* Since 6.0.0
|
||||
*/
|
||||
private long myAutoInflateBinariesMaximumBytes = 10 * FileUtils.ONE_MB;
|
||||
|
||||
/**
|
||||
* Since 6.0.0
|
||||
*/
|
||||
private int myBulkExportFileRetentionPeriodHours = 2;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
|
@ -2793,12 +2809,82 @@ public class DaoConfig {
|
|||
* This setting indicates if a cross-partition subscription can be made.
|
||||
*
|
||||
* @see ModelConfig#setCrossPartitionSubscription(boolean)
|
||||
* @since 7.5.0
|
||||
* @since 5.7.0
|
||||
*/
|
||||
public void setCrossPartitionSubscription(boolean theAllowCrossPartitionSubscription) {
|
||||
this.myModelConfig.setCrossPartitionSubscription(theAllowCrossPartitionSubscription);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* This setting indicates whether binaries are allowed to be automatically inflated from external storage during requests.
|
||||
* Default is true.
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @return whether binaries are allowed to be automatically inflated from external storage during requests.
|
||||
*/
|
||||
public boolean isAllowAutoInflateBinaries() {
|
||||
return myAllowAutoInflateBinaries;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This setting indicates whether binaries are allowed to be automatically inflated from external storage during requests.
|
||||
* Default is true.
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @param theAllowAutoDeExternalizingBinaries the value to set.
|
||||
*/
|
||||
public void setAllowAutoInflateBinaries(boolean theAllowAutoDeExternalizingBinaries) {
|
||||
myAllowAutoInflateBinaries = theAllowAutoDeExternalizingBinaries;
|
||||
}
|
||||
|
||||
/**
|
||||
* This setting controls how many bytes of binary content will be automatically inflated from external storage during requests
|
||||
* which contain binary data.
|
||||
* Default is 10MB
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @param theAutoInflateBinariesMaximumBytes the maximum number of bytes to de-externalize.
|
||||
*/
|
||||
public void setAutoInflateBinariesMaximumBytes(long theAutoInflateBinariesMaximumBytes) {
|
||||
myAutoInflateBinariesMaximumBytes = theAutoInflateBinariesMaximumBytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* This setting controls how many bytes of binary content will be automatically inflated from external storage during requests
|
||||
* which contain binary data.
|
||||
* Default is 10MB
|
||||
*
|
||||
* @since 6.0.0
|
||||
* @return the maximum number of bytes to de-externalize during requests.
|
||||
*/
|
||||
public long getAutoInflateBinariesMaximumBytes() {
|
||||
return myAutoInflateBinariesMaximumBytes;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This setting controls how long Bulk Export collection entities will be retained after job start.
|
||||
* Default is 2 hours. Setting this value to 0 or less will cause Bulk Export collection entities to never be expired.
|
||||
*
|
||||
* @since 6.0.0
|
||||
*/
|
||||
public int getBulkExportFileRetentionPeriodHours() {
|
||||
return myBulkExportFileRetentionPeriodHours;
|
||||
}
|
||||
|
||||
/**
|
||||
* This setting controls how long Bulk Export collection entities will be retained after job start.
|
||||
* Default is 2 hours. Setting this value to 0 or less will cause Bulk Export collection entities to never be expired.
|
||||
*
|
||||
* @since 6.0.0
|
||||
*/
|
||||
public void setBulkExportFileRetentionPeriodHours(int theBulkExportFileRetentionPeriodHours) {
|
||||
myBulkExportFileRetentionPeriodHours = theBulkExportFileRetentionPeriodHours;
|
||||
}
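A minimal configuration sketch for the DaoConfig settings added above (the values are illustrative, not recommendations):

	DaoConfig daoConfig = new DaoConfig();

	// Binary auto-inflation is on by default and capped at 10MB per request
	daoConfig.setAllowAutoInflateBinaries(true);
	daoConfig.setAutoInflateBinariesMaximumBytes(5 * FileUtils.ONE_MB);

	// Retain Bulk Export collection entities for 4 hours after job start; 0 or less disables expiry
	daoConfig.setBulkExportFileRetentionPeriodHours(4);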
|
||||
|
||||
public enum StoreMetaSourceInformationEnum {
|
||||
NONE(false, false),
|
||||
SOURCE_URI(true, false),
|
||||
|
@ -2889,7 +2975,7 @@ public class DaoConfig {
|
|||
NON_VERSIONED,
|
||||
|
||||
/**
|
||||
* Tags are stored directly in the resource body (in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable}
|
||||
* Tags are stored directly in the resource body (in the {@link ResourceHistoryTable}
|
||||
* entry for the resource, meaning that they are not indexed separately, and are versioned with the rest
|
||||
* of the resource.
|
||||
*/
|
||||
|
|
|
@ -76,22 +76,23 @@ public class BinaryStorageInterceptor {
|
|||
private BinaryAccessProvider myBinaryAccessProvider;
|
||||
private Class<? extends IPrimitiveType<byte[]>> myBinaryType;
|
||||
private String myDeferredListKey;
|
||||
private long myAutoDeExternalizeMaximumBytes = 10 * FileUtils.ONE_MB;
|
||||
private long myAutoInflateBinariesMaximumBytes = 10 * FileUtils.ONE_MB;
|
||||
private boolean myAllowAutoInflateBinaries = true;
|
||||
|
||||
/**
|
||||
* Any externalized binaries will be rehydrated if their size is below this threshold when
|
||||
* reading the resource back. Default is 10MB.
|
||||
*/
|
||||
public long getAutoDeExternalizeMaximumBytes() {
|
||||
return myAutoDeExternalizeMaximumBytes;
|
||||
public long getAutoInflateBinariesMaximumSize() {
|
||||
return myAutoInflateBinariesMaximumBytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Any externalized binaries will be rehydrated if their size is below this threshold when
|
||||
* reading the resource back. Default is 10MB.
|
||||
*/
|
||||
public void setAutoDeExternalizeMaximumBytes(long theAutoDeExternalizeMaximumBytes) {
|
||||
myAutoDeExternalizeMaximumBytes = theAutoDeExternalizeMaximumBytes;
|
||||
public void setAutoInflateBinariesMaximumSize(long theAutoInflateBinariesMaximumBytes) {
|
||||
myAutoInflateBinariesMaximumBytes = theAutoInflateBinariesMaximumBytes;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -248,6 +249,10 @@ public class BinaryStorageInterceptor {
|
|||
|
||||
@Hook(Pointcut.STORAGE_PRESHOW_RESOURCES)
|
||||
public void preShow(IPreResourceShowDetails theDetails) throws IOException {
|
||||
if (!isAllowAutoInflateBinaries()) {
|
||||
return;
|
||||
}
|
||||
|
||||
long unmarshalledByteCount = 0;
|
||||
|
||||
for (IBaseResource nextResource : theDetails) {
|
||||
|
@ -265,7 +270,7 @@ public class BinaryStorageInterceptor {
|
|||
throw new InvalidRequestException(Msg.code(1330) + msg);
|
||||
}
|
||||
|
||||
if ((unmarshalledByteCount + blobDetails.getBytes()) < myAutoDeExternalizeMaximumBytes) {
|
||||
if ((unmarshalledByteCount + blobDetails.getBytes()) < myAutoInflateBinariesMaximumBytes) {
|
||||
|
||||
byte[] bytes = myBinaryStorageSvc.fetchBlob(resourceId, attachmentId.get());
|
||||
nextTarget.setData(bytes);
|
||||
|
@ -295,6 +300,14 @@ public class BinaryStorageInterceptor {
|
|||
return binaryTargets;
|
||||
}
|
||||
|
||||
public void setAllowAutoInflateBinaries(boolean theAllowAutoInflateBinaries) {
|
||||
myAllowAutoInflateBinaries = theAllowAutoInflateBinaries;
|
||||
}
|
||||
|
||||
public boolean isAllowAutoInflateBinaries() {
|
||||
return myAllowAutoInflateBinaries;
|
||||
}
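A minimal sketch of wiring the renamed interceptor settings, assuming an interceptor registry (myInterceptorRegistry, as used in the test setup above) is available to the application:

	BinaryStorageInterceptor binaryStorageInterceptor = new BinaryStorageInterceptor();

	// Rehydrate externalized binaries on read only up to 5MB per request
	binaryStorageInterceptor.setAutoInflateBinariesMaximumSize(5 * FileUtils.ONE_MB);

	// Or disable auto-inflation entirely, leaving only the externalized-binary extension on read
	binaryStorageInterceptor.setAllowAutoInflateBinaries(false);

	myInterceptorRegistry.registerInterceptor(binaryStorageInterceptor);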
|
||||
|
||||
private static class DeferredBinaryTarget {
|
||||
private final String myBlobId;
|
||||
private final IBinaryTarget myBinaryTarget;
|
||||
|
|
|
@ -56,6 +56,7 @@ import java.util.List;
|
|||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES;
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
|
||||
|
||||
|
@ -105,6 +106,15 @@ public class BulkDataExportProvider {
|
|||
return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
|
||||
}
|
||||
|
||||
private String getDefaultPartitionServerBase(ServletRequestDetails theRequestDetails) {
|
||||
if (theRequestDetails.getTenantId() == null || theRequestDetails.getTenantId().equals(JpaConstants.DEFAULT_PARTITION_NAME)) {
|
||||
return getServerBase(theRequestDetails);
|
||||
} else {
|
||||
return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest().replace(theRequestDetails.getTenantId(), JpaConstants.DEFAULT_PARTITION_NAME), "/");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Group/Id/$export
|
||||
*/
|
||||
|
@ -135,6 +145,7 @@ public class BulkDataExportProvider {
|
|||
private void validateResourceTypesAllContainPatientSearchParams(Set<String> theResourceTypes) {
|
||||
if (theResourceTypes != null) {
|
||||
List<String> badResourceTypes = theResourceTypes.stream()
|
||||
.filter(resourceType -> !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType))
|
||||
.filter(resourceType -> !myBulkDataExportSvc.getPatientCompartmentResources().contains(resourceType))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
|
@ -195,7 +206,7 @@ public class BulkDataExportProvider {
|
|||
bulkResponseDocument.setTransactionTime(status.getStatusTime());
|
||||
bulkResponseDocument.setRequest(status.getRequest());
|
||||
for (IBulkDataExportSvc.FileEntry nextFile : status.getFiles()) {
|
||||
String serverBase = getServerBase(theRequestDetails);
|
||||
String serverBase = getDefaultPartitionServerBase(theRequestDetails);
|
||||
String nextUrl = serverBase + "/" + nextFile.getResourceId().toUnqualifiedVersionless().getValue();
|
||||
bulkResponseDocument
|
||||
.addOutput()
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@ -47,16 +48,14 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
private DaoConfig myDaoConfig;
|
||||
|
||||
private final String myResourceName;
|
||||
private final Long myResourceId;
|
||||
private final Long myVersion;
|
||||
private final ResourcePersistentId myResourceId;
|
||||
private final ExpungeOptions myExpungeOptions;
|
||||
private final RequestDetails myRequestDetails;
|
||||
private final AtomicInteger myRemainingCount;
|
||||
|
||||
public ExpungeOperation(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
|
||||
public ExpungeOperation(String theResourceName, ResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
|
||||
myResourceName = theResourceName;
|
||||
myResourceId = theResourceId;
|
||||
myVersion = theVersion;
|
||||
myExpungeOptions = theExpungeOptions;
|
||||
myRequestDetails = theRequestDetails;
|
||||
myRemainingCount = new AtomicInteger(myExpungeOptions.getLimit());
|
||||
|
@ -64,7 +63,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
|
||||
@Override
|
||||
public ExpungeOutcome call() {
|
||||
if (myExpungeOptions.isExpungeDeletedResources() && myVersion == null) {
|
||||
if (myExpungeOptions.isExpungeDeletedResources() && (myResourceId == null || myResourceId.getVersion() == null)) {
|
||||
expungeDeletedResources();
|
||||
if (expungeLimitReached()) {
|
||||
return expungeOutcome();
|
||||
|
@ -82,7 +81,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
}
|
||||
|
||||
private void expungeDeletedResources() {
|
||||
List<Long> resourceIds = findHistoricalVersionsOfDeletedResources();
|
||||
List<ResourcePersistentId> resourceIds = findHistoricalVersionsOfDeletedResources();
|
||||
|
||||
deleteHistoricalVersions(resourceIds);
|
||||
if (expungeLimitReached()) {
|
||||
|
@ -92,14 +91,14 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
deleteCurrentVersionsOfDeletedResources(resourceIds);
|
||||
}
|
||||
|
||||
private List<Long> findHistoricalVersionsOfDeletedResources() {
|
||||
List<Long> retVal = myExpungeDaoService.findHistoricalVersionsOfDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
|
||||
private List<ResourcePersistentId> findHistoricalVersionsOfDeletedResources() {
|
||||
List<ResourcePersistentId> retVal = myExpungeDaoService.findHistoricalVersionsOfDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
|
||||
ourLog.debug("Found {} historical versions", retVal.size());
|
||||
return retVal;
|
||||
}
|
||||
|
||||
private List<Long> findHistoricalVersionsOfNonDeletedResources() {
|
||||
return myExpungeDaoService.findHistoricalVersionsOfNonDeletedResources(myResourceName, myResourceId, myVersion, myRemainingCount.get());
|
||||
private List<ResourcePersistentId> findHistoricalVersionsOfNonDeletedResources() {
|
||||
return myExpungeDaoService.findHistoricalVersionsOfNonDeletedResources(myResourceName, myResourceId, myRemainingCount.get());
|
||||
}
|
||||
|
||||
private boolean expungeLimitReached() {
|
||||
|
@ -111,7 +110,7 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
}
|
||||
|
||||
private void expungeOldVersions() {
|
||||
List<Long> historicalIds = findHistoricalVersionsOfNonDeletedResources();
|
||||
List<ResourcePersistentId> historicalIds = findHistoricalVersionsOfNonDeletedResources();
|
||||
|
||||
getPartitionRunner().runInPartitionedThreads(historicalIds, partition -> myExpungeDaoService.expungeHistoricalVersions(myRequestDetails, partition, myRemainingCount));
|
||||
}
|
||||
|
@ -120,11 +119,11 @@ public class ExpungeOperation implements Callable<ExpungeOutcome> {
|
|||
return new PartitionRunner(PROCESS_NAME, THREAD_PREFIX, myDaoConfig.getExpungeBatchSize(), myDaoConfig.getExpungeThreadCount());
|
||||
}
|
||||
|
||||
private void deleteCurrentVersionsOfDeletedResources(List<Long> theResourceIds) {
|
||||
private void deleteCurrentVersionsOfDeletedResources(List<ResourcePersistentId> theResourceIds) {
|
||||
getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeCurrentVersionOfResources(myRequestDetails, partition, myRemainingCount));
|
||||
}
|
||||
|
||||
private void deleteHistoricalVersions(List<Long> theResourceIds) {
|
||||
private void deleteHistoricalVersions(List<ResourcePersistentId> theResourceIds) {
|
||||
getPartitionRunner().runInPartitionedThreads(theResourceIds, partition -> myExpungeDaoService.expungeHistoricalVersionsOfIds(myRequestDetails, partition, myRemainingCount));
|
||||
}
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.i18n.Msg;
|
|||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -42,28 +43,28 @@ public class ExpungeService {
|
|||
@Autowired
|
||||
private ApplicationContext myApplicationContext;
|
||||
|
||||
protected ExpungeOperation getExpungeOperation(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
|
||||
return myApplicationContext.getBean(ExpungeOperation.class, theResourceName, theResourceId, theVersion, theExpungeOptions, theRequestDetails);
|
||||
protected ExpungeOperation getExpungeOperation(String theResourceName, ResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
|
||||
return myApplicationContext.getBean(ExpungeOperation.class, theResourceName, theResourceId, theExpungeOptions, theRequestDetails);
|
||||
}
|
||||
|
||||
public ExpungeOutcome expunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
|
||||
ourLog.info("Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", theResourceName, theResourceId, theVersion, theExpungeOptions);
|
||||
public ExpungeOutcome expunge(String theResourceName, ResourcePersistentId theResourceId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
|
||||
ourLog.info("Expunge: ResourceName[{}] Id[{}] Version[{}] Options[{}]", theResourceName, theResourceId != null ? theResourceId.getId() : null, theResourceId != null ? theResourceId.getVersion() : null, theExpungeOptions);
|
||||
|
||||
if (theExpungeOptions.getLimit() < 1) {
|
||||
throw new InvalidRequestException(Msg.code(1087) + "Expunge limit may not be less than 1. Received expunge limit " + theExpungeOptions.getLimit() + ".");
|
||||
}
|
||||
|
||||
if (theResourceName == null && theResourceId == null && theVersion == null) {
|
||||
if (theResourceName == null && (theResourceId == null || (theResourceId.getId() == null && theResourceId.getVersion() == null))) {
|
||||
if (theExpungeOptions.isExpungeEverything()) {
|
||||
myExpungeEverythingService.expungeEverything(theRequest);
|
||||
}
|
||||
}
|
||||
|
||||
ExpungeOperation expungeOperation = getExpungeOperation(theResourceName, theResourceId, theVersion, theExpungeOptions, theRequest);
|
||||
ExpungeOperation expungeOperation = getExpungeOperation(theResourceName, theResourceId, theExpungeOptions, theRequest);
|
||||
return expungeOperation.call();
|
||||
}
|
||||
|
||||
public void deleteAllSearchParams(Long theResourceId) {
|
||||
public void deleteAllSearchParams(ResourcePersistentId theResourceId) {
|
||||
myExpungeDaoService.deleteAllSearchParams(theResourceId);
|
||||
}
|
||||
}
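A minimal sketch of calling the reworked expunge API, assuming an ExpungeService bean and a RequestDetails (mySrd); the two-argument ResourcePersistentId constructor (id plus version) is assumed here and the id values are hypothetical:

	ExpungeOptions options = new ExpungeOptions();
	options.setLimit(1000);
	options.setExpungeDeletedResources(true);
	options.setExpungeOldVersions(true);

	// Target a single resource: persistent id 123, version 2
	ResourcePersistentId resourceId = new ResourcePersistentId(123L, 2L);

	ExpungeOutcome outcome = myExpungeService.expunge("Patient", resourceId, options, mySrd);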
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.dao.expunge;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
|
@ -21,20 +21,21 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
public interface IResourceExpungeService {
|
||||
List<Long> findHistoricalVersionsOfDeletedResources(String theResourceName, Long theResourceId, int theI);
|
||||
List<ResourcePersistentId> findHistoricalVersionsOfDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theI);
|
||||
|
||||
List<Long> findHistoricalVersionsOfNonDeletedResources(String theResourceName, Long theResourceId, Long theVersion, int theI);
|
||||
List<ResourcePersistentId> findHistoricalVersionsOfNonDeletedResources(String theResourceName, ResourcePersistentId theResourceId, int theI);
|
||||
|
||||
void expungeHistoricalVersions(RequestDetails theRequestDetails, List<Long> thePartition, AtomicInteger theRemainingCount);
|
||||
void expungeHistoricalVersions(RequestDetails theRequestDetails, List<ResourcePersistentId> thePartition, AtomicInteger theRemainingCount);
|
||||
|
||||
void expungeCurrentVersionOfResources(RequestDetails theRequestDetails, List<Long> theResourceIds, AtomicInteger theRemainingCount);
|
||||
void expungeCurrentVersionOfResources(RequestDetails theRequestDetails, List<ResourcePersistentId> theResourceIds, AtomicInteger theRemainingCount);
|
||||
|
||||
void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<Long> theResourceIds, AtomicInteger theRemainingCount);
|
||||
void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<ResourcePersistentId> theResourceIds, AtomicInteger theRemainingCount);
|
||||
|
||||
void deleteAllSearchParams(Long theResourceId);
|
||||
void deleteAllSearchParams(ResourcePersistentId theResourceId);
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* HAPI FHIR Storage api
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
|
||||
* %%
|
||||
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import com.google.common.collect.Lists;
|
||||
|
@ -57,7 +58,7 @@ public class PartitionRunner {
|
|||
myThreadCount = theThreadCount;
|
||||
}
|
||||
|
||||
public void runInPartitionedThreads(List<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {
|
||||
public void runInPartitionedThreads(List<ResourcePersistentId> theResourceIds, Consumer<List<ResourcePersistentId>> partitionConsumer) {
|
||||
|
||||
List<Callable<Void>> callableTasks = buildCallableTasks(theResourceIds, partitionConsumer);
|
||||
if (callableTasks.size() == 0) {
|
||||
|
@ -92,7 +93,7 @@ public class PartitionRunner {
|
|||
}
|
||||
}
|
||||
|
||||
private List<Callable<Void>> buildCallableTasks(List<Long> theResourceIds, Consumer<List<Long>> partitionConsumer) {
|
||||
private List<Callable<Void>> buildCallableTasks(List<ResourcePersistentId> theResourceIds, Consumer<List<ResourcePersistentId>> partitionConsumer) {
|
||||
List<Callable<Void>> retval = new ArrayList<>();
|
||||
|
||||
if (myBatchSize > theResourceIds.size()) {
|
||||
|
@ -100,9 +101,9 @@ public class PartitionRunner {
|
|||
} else {
|
||||
ourLog.info("Creating batch job of {} entries", theResourceIds.size());
|
||||
}
|
||||
List<List<Long>> partitions = Lists.partition(theResourceIds, myBatchSize);
|
||||
List<List<ResourcePersistentId>> partitions = Lists.partition(theResourceIds, myBatchSize);
|
||||
|
||||
for (List<Long> nextPartition : partitions) {
|
||||
for (List<ResourcePersistentId> nextPartition : partitions) {
|
||||
if (nextPartition.size() > 0) {
|
||||
Callable<Void> callableTask = () -> {
|
||||
ourLog.info(myProcessName + " {} resources", nextPartition.size());
|
||||
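A minimal usage sketch for the runner after the switch to ResourcePersistentId (the batch size, thread count, single-argument ResourcePersistentId constructor and ourLog field are illustrative assumptions):

	List<ResourcePersistentId> ids = Arrays.asList(
		new ResourcePersistentId(1L), new ResourcePersistentId(2L), new ResourcePersistentId(3L));

	PartitionRunner runner = new PartitionRunner("Expunge", "expunge", 2, 2);

	// The ids are split into partitions of at most 2, and each partition is handed to the consumer on a worker thread
	runner.runInPartitionedThreads(ids, partition -> ourLog.info("Processing {} ids", partition.size()));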
|
|
|
@ -24,10 +24,18 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterOr;
|
||||
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.BundleLinks;
|
||||
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.IVersionSpecificBundleFactory;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.DateOrListParam;
|
||||
|
@ -44,8 +52,9 @@ import ca.uhn.fhir.rest.param.StringOrListParam;
|
|||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
import ca.uhn.fhir.rest.param.TokenOrListParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.IPagingProvider;
|
||||
import ca.uhn.fhir.rest.server.RestfulServerUtils;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -57,19 +66,27 @@ import org.hl7.fhir.instance.model.api.IIdType;
|
|||
import org.hl7.fhir.utilities.graphql.Argument;
|
||||
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
|
||||
import org.hl7.fhir.utilities.graphql.Value;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.rest.api.Constants.PARAM_COUNT;
|
||||
import static ca.uhn.fhir.rest.api.Constants.PARAM_FILTER;
|
||||
|
||||
public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageServices {
|
||||
|
||||
// these constants have not yet been defined in org.hl7.fhir.core, so we define them here
|
||||
static final String SEARCH_ID_PARAM = "search-id";
|
||||
static final String SEARCH_OFFSET_PARAM = "search-offset";
|
||||
|
||||
private static final int MAX_SEARCH_SIZE = 500;
|
||||
@Autowired
|
||||
private FhirContext myContext;
|
||||
|
@ -77,6 +94,12 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService
|
|||
private DaoRegistry myDaoRegistry;
|
||||
@Autowired
|
||||
private ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
|
||||
@Autowired
|
||||
private IRequestPartitionHelperSvc myPartitionHelperSvc;
|
||||
@Autowired
|
||||
private IPagingProvider myPagingProvider;
|
||||
|
||||
private IFhirResourceDao<? extends IBaseResource> getDao(String theResourceType) {
|
||||
RuntimeResourceDefinition typeDef = myContext.getResourceDefinition(theResourceType);
|
||||
|
@ -95,19 +118,18 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService
|
|||
return name.replaceAll("-", "_");
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
@Override
|
||||
public void listResources(Object theAppInfo, String theType, List<Argument> theSearchParams, List<IBaseResource> theMatches) throws FHIRException {
|
||||
private SearchParameterMap buildSearchParams(String theType, List<Argument> theSearchParams) {
|
||||
List<Argument> resourceSearchParam = theSearchParams.stream()
|
||||
.filter(it -> !PARAM_COUNT.equals(it.getName()))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
FhirContext fhirContext = myContext;
|
||||
RuntimeResourceDefinition typeDef = fhirContext.getResourceDefinition(theType);
|
||||
IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(typeDef.getImplementingClass());
|
||||
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.setLoadSynchronousUpTo(MAX_SEARCH_SIZE);
|
||||
|
||||
ResourceSearchParams searchParams = mySearchParamRegistry.getActiveSearchParams(typeDef.getName());
|
||||
|
||||
for (Argument nextArgument : theSearchParams) {
|
||||
for (Argument nextArgument : resourceSearchParam) {
|
||||
|
||||
if (nextArgument.getName().equals(PARAM_FILTER)) {
|
||||
String value = nextArgument.getValues().get(0).getValue();
|
||||
|
@ -189,8 +211,17 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService
|
|||
params.add(searchParamName, queryParam);
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
@Override
|
||||
public void listResources(Object theAppInfo, String theType, List<Argument> theSearchParams, List<IBaseResource> theMatches) throws FHIRException {
|
||||
SearchParameterMap params = buildSearchParams(theType, theSearchParams);
|
||||
params.setLoadSynchronousUpTo(MAX_SEARCH_SIZE);
|
||||
|
||||
RequestDetails requestDetails = (RequestDetails) theAppInfo;
|
||||
IBundleProvider response = dao.search(params, requestDetails);
|
||||
IBundleProvider response = getDao(theType).search(params, requestDetails);
|
||||
Integer size = response.size();
|
||||
//We set size to null in SearchCoordinatorSvcImpl.executeQuery() if matching results exceed count
|
||||
//so don't throw here
|
||||
|
@ -201,7 +232,6 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService
|
|||
|
||||
Validate.notNull(size, "size is null");
|
||||
theMatches.addAll(response.getResources(0, size));
|
||||
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
|
@ -228,10 +258,88 @@ public class DaoRegistryGraphQLStorageServices implements IGraphQLStorageService
|
|||
return new ReferenceResolution(theContext, outcome);
|
||||
}
|
||||
|
||||
private Optional<String> getArgument(List<Argument> params, String name) {
|
||||
return params.stream()
|
||||
.filter(it -> name.equals(it.getName()))
|
||||
.map(it -> it.getValues().get(0).getValue())
|
||||
.findAny();
|
||||
}

@Transactional(propagation = Propagation.NEVER)
@Override
public IBaseBundle search(Object theAppInfo, String theType, List<Argument> theSearchParams) throws FHIRException {
throw new NotImplementedOperationException(Msg.code(1277) + "Not yet able to handle this GraphQL request");
RequestDetails requestDetails = (RequestDetails) theAppInfo;

Optional<String> searchIdArgument = getArgument(theSearchParams, SEARCH_ID_PARAM);
Optional<String> searchOffsetArgument = getArgument(theSearchParams, SEARCH_OFFSET_PARAM);

String searchId;
int searchOffset;
int pageSize;
IBundleProvider response;

if (searchIdArgument.isPresent() && searchOffsetArgument.isPresent()) {
searchId = searchIdArgument.get();
searchOffset = Integer.parseInt(searchOffsetArgument.get());

response = Optional.ofNullable(myPagingProvider.retrieveResultList(requestDetails, searchId)).orElseThrow(()->{
String msg = myContext.getLocalizer().getMessageSanitized(DaoRegistryGraphQLStorageServices.class, "invalidGraphqlCursorArgument", searchId);
return new InvalidRequestException(Msg.code(2076) + msg);
});

pageSize = Optional.ofNullable(response.preferredPageSize())
.orElseGet(myPagingProvider::getDefaultPageSize);
} else {
pageSize = getArgument(theSearchParams, "_count").map(Integer::parseInt)
.orElseGet(myPagingProvider::getDefaultPageSize);

SearchParameterMap params = buildSearchParams(theType, theSearchParams);
params.setCount(pageSize);

CacheControlDirective cacheControlDirective = new CacheControlDirective();
cacheControlDirective.parse(requestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));

RequestPartitionId requestPartitionId = myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(requestDetails, theType, params, null);
response = mySearchCoordinatorSvc.registerSearch(getDao(theType), params, theType, cacheControlDirective, requestDetails, requestPartitionId);

searchOffset = 0;
searchId = myPagingProvider.storeResultList(requestDetails, response);
}

// response.size() may return {@literal null}, in that case use pageSize
String serverBase = requestDetails.getFhirServerBase();
Optional<Integer> numTotalResults = Optional.ofNullable(response.size());
int numToReturn = numTotalResults.map(integer -> Math.min(pageSize, integer - searchOffset)).orElse(pageSize);

BundleLinks links = new BundleLinks(requestDetails.getServerBaseForRequest(), null, RestfulServerUtils.prettyPrintResponse(requestDetails.getServer(), requestDetails), BundleTypeEnum.SEARCHSET);

// RestfulServerUtils.createLinkSelf not suitable here
String linkFormat = "%s/%s?_format=application/json&search-id=%s&search-offset=%d&_count=%d";

String linkSelf = String.format(linkFormat, serverBase, theType, searchId, searchOffset, pageSize);
links.setSelf(linkSelf);

boolean hasNext = numTotalResults.map(total -> (searchOffset + numToReturn) < total).orElse(true);

if (hasNext) {
String linkNext = String.format(linkFormat, serverBase, theType, searchId, searchOffset+numToReturn, pageSize);
links.setNext(linkNext);
}

if (searchOffset > 0) {
String linkPrev = String.format(linkFormat, serverBase, theType, searchId, Math.max(0, searchOffset-pageSize), pageSize);
links.setPrev(linkPrev);
}
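
// Worked example with assumed numbers (illustration only, not from the commit):
// given pageSize = 20, searchOffset = 20 and 45 total matches,
// numToReturn = Math.min(20, 45 - 20) = 20 and hasNext = (20 + 20) < 45 = true,
// so the next link carries search-offset=40 and the prev link carries
// search-offset=Math.max(0, 20 - 20) = 0.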

List<IBaseResource> resourceList = response.getResources(searchOffset, numToReturn + searchOffset);

IVersionSpecificBundleFactory bundleFactory = myContext.newBundleFactory();
bundleFactory.addRootPropertiesToBundle(response.getUuid(), links, response.size(), response.getPublished());
bundleFactory.addResourcesToBundle(resourceList, BundleTypeEnum.SEARCHSET, serverBase, null, null);

IBaseResource result = bundleFactory.getResourceBundle();
return (IBaseBundle) result;
}

}
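
To make the cursor links above concrete, here is a small sketch that fills the link format with assumed example values (the server base, search id, and counts are hypothetical):

// Illustration only: what a "next" link produced by search() would look like
String linkFormat = "%s/%s?_format=application/json&search-id=%s&search-offset=%d&_count=%d";
String linkNext = String.format(linkFormat, "https://example.org/fhir", "Patient", "abc-123", 20, 20);
// -> https://example.org/fhir/Patient?_format=application/json&search-id=abc-123&search-offset=20&_count=20

Paging then works by sending those search-id and search-offset values back on the follow-up request, which is the branch handled at the top of search().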

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.0.0-PRE10-SNAPSHOT</version>
<version>6.0.0-PRE11-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
Some files were not shown because too many files have changed in this diff.