diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java
index 3784fc8a18d..16aa22858d7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/api/IBulkDataExportSvc.java
@@ -27,6 +27,7 @@ import javax.transaction.Transactional;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import java.util.Set;
 
 public interface IBulkDataExportSvc {
 	void buildExportFiles();
@@ -38,6 +39,11 @@ public interface IBulkDataExportSvc {
 
 	JobInfo getJobInfoOrThrowResourceNotFound(String theJobId);
 
+	/**
+	 * Return a set of all resource types which contain search parameters which have Patient as a target.
+	 */
+	Set<String> getPatientCompartmentResources();
+
 	void cancelAndPurgeAllJobs();
 
 	class JobInfo {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java
index 278095ee393..3f2cd98445a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/BaseBulkItemReader.java
@@ -186,7 +186,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePer
 		List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
 		if (searchParams == null || searchParams.size() == 0) {
-			String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType);
+			String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType);
 			throw new IllegalArgumentException(errorMessage);
 		} else if (searchParams.size() == 1) {
 			patientSearchParam = searchParams.get(0);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java
index c5d2ff5c5de..219b4f17368 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/PatientBulkItemReader.java
@@ -55,7 +55,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
 	private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) {
 		RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
 		if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
-			throw new IllegalArgumentException(String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
+			throw new IllegalArgumentException(String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
 		}
 		return runtimeSearchParam;
 	}
@@ -74,7 +74,6 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
 		List<SearchParameterMap> maps = createSearchParameterMapsForResourceType();
 		String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName();
-
 		for (SearchParameterMap map: maps) {
 			//Ensure users did not monkey with the patient compartment search parameter.
 			validateSearchParameters(map);
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java
index 5ff7be95bb8..4fde8b3c2f6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/provider/BulkDataExportProvider.java
@@ -126,6 +126,7 @@ public class BulkDataExportProvider {
 		@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
 		ServletRequestDetails theRequestDetails
 	) {
+		myBulkDataExportSvc.getPatientCompartmentResources();
 		validatePreferAsyncHeader(theRequestDetails);
 		BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
 		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java
index 610c123527c..6da24977af3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/svc/BulkDataExportSvcImpl.java
@@ -21,6 +21,9 @@ package ca.uhn.fhir.jpa.bulk.svc;
  */
 
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.RuntimeResourceDefinition;
+import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.fhirpath.IFhirPath;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
@@ -40,12 +43,16 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
 import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
+import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.UrlUtil;
+import com.google.common.collect.Sets;
 import org.apache.commons.lang3.time.DateUtils;
+import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
+import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.InstantType;
 import org.quartz.JobExecutionContext;
@@ -64,13 +71,16 @@ import org.springframework.transaction.support.TransactionTemplate;
 import javax.annotation.PostConstruct;
 import javax.transaction.Transactional;
 import java.util.Date;
-import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
+import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
+import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
+import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
 import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
 import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -112,6 +122,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 	@Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
 	private org.springframework.batch.core.Job myPatientBulkExportJob;
 
+	private Set<String> myCompartmentResources;
+
 	private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR);
 
 	/**
@@ -291,9 +303,9 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 		requestBuilder.append("/");
 
 		//Prefix the export url with Group/[id]/ or /Patient/ depending on what type of request it is.
-		if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) {
+		if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
 			requestBuilder.append(theBulkDataExportOptions.getGroupId().toVersionless()).append("/");
-		} else if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.PATIENT)) {
+		} else if (theBulkDataExportOptions.getExportStyle().equals(PATIENT)) {
 			requestBuilder.append("Patient/");
 		}
 
@@ -313,7 +325,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 				.forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter)));
 		}
 
-		if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) {
+		if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
 			requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(theBulkDataExportOptions.getGroupId().getValue());
 			requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(theBulkDataExportOptions.isExpandMdm());
 		}
@@ -338,7 +350,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 			// This is probably not a useful default, but having the default be "download the whole
 			// server" seems like a risky default too. We'll deal with that by having the default involve
 			// only returning a small time span
-			resourceTypes = myContext.getResourceTypes();
+			resourceTypes = getAllowedResourceTypesForBulkExportStyle(theBulkDataExportOptions.getExportStyle());
 			if (since == null) {
 				since = DateUtils.addDays(new Date(), -1);
 			}
@@ -437,6 +449,35 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 		return retVal;
 	}
 
+	@Override
+	public Set<String> getPatientCompartmentResources() {
+		if (myCompartmentResources == null) {
+			myCompartmentResources = myContext.getResourceTypes().stream()
+				.filter(this::resourceTypeIsInPatientCompartment)
+				.collect(Collectors.toSet());
+		}
+		return myCompartmentResources;
+	}
+
+	/**
+	 * Return true if any search parameter in the resource can point at a patient, false otherwise
+	 */
+	private boolean resourceTypeIsInPatientCompartment(String theResourceType) {
+		RuntimeResourceDefinition runtimeResourceDefinition = myContext.getResourceDefinition(theResourceType);
+		List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
+		return searchParams != null && searchParams.size() >= 1;
+	}
+
+	public Set<String> getAllowedResourceTypesForBulkExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
+		if (theExportStyle.equals(SYSTEM)) {
+			return myContext.getResourceTypes();
+		} else if (theExportStyle.equals(GROUP) || theExportStyle.equals(PATIENT)) {
+			return getPatientCompartmentResources();
+		} else {
+			return null;
+		}
+	}
+
 	private IIdType toId(String theResourceId) {
 		IIdType retVal = myContext.getVersion().newIdType();
 		retVal.setValue(theResourceId);
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
index 26d31c3e11f..eefc4e9afd6 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportSvcImplR4Test.java
@@ -936,17 +936,42 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 	}
 
 	@Test
-	public void testPatientExportWithNoTypesWorks() {
+	public void testAllExportStylesWorkWithNullResourceTypes() {
+		createResources();
+		myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
+
 		// Create a bulk job
 		BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
 		bulkDataExportOptions.setOutputFormat(null);
-		bulkDataExportOptions.setResourceTypes(Sets.newHashSet(myFhirCtx.getResourceTypes()));
+		bulkDataExportOptions.setResourceTypes(null);
 		bulkDataExportOptions.setSince(null);
 		bulkDataExportOptions.setFilters(null);
 		bulkDataExportOptions.setGroupId(myPatientGroupId);
 		bulkDataExportOptions.setExpandMdm(true);
 		bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+
+		//Patient-style
 		IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		myBulkDataExportSvc.buildExportFiles();
+		awaitAllBulkJobCompletions();
+		IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
+		assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
+
+		//Group-style
+		bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+		bulkDataExportOptions.setGroupId(myPatientGroupId);
+		jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		myBulkDataExportSvc.buildExportFiles();
+		awaitAllBulkJobCompletions();
+		jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
+		assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
+
+		//System-style
+		bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+		jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		myBulkDataExportSvc.buildExportFiles();
+		awaitAllBulkJobCompletions();
+		jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
+		assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
 	}
 
 	private void awaitJobCompletion(JobExecution theJobExecution) {