Filter default resource types based on export style

This commit is contained in:
Tadgh 2021-03-10 10:30:54 -05:00
parent 30a48894cc
commit bfb9d0bc36
6 changed files with 82 additions and 10 deletions

View File

@ -27,6 +27,7 @@ import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
public interface IBulkDataExportSvc {
void buildExportFiles();
@ -38,6 +39,11 @@ public interface IBulkDataExportSvc {
JobInfo getJobInfoOrThrowResourceNotFound(String theJobId);
/**
* Return a set of all resource types which contain search parameters which have Patient as a target.
*/
Set<String> getPatientCompartmentResources();
void cancelAndPurgeAllJobs();
class JobInfo {

View File

@ -186,7 +186,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
RuntimeSearchParam patientSearchParam;
List<RuntimeSearchParam> searchParams = runtimeResourceDefinition.getSearchParamsForCompartmentName("Patient");
if (searchParams == null || searchParams.size() == 0) {
String errorMessage = String.format("Resource type [%s] is not eligible for Group Bulk export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType);
String errorMessage = String.format("Resource type [%s] is not eligible for this type of export, as it contains no Patient compartment, and no `patient` or `subject` search parameter", myResourceType);
throw new IllegalArgumentException(errorMessage);
} else if (searchParams.size() == 1) {
patientSearchParam = searchParams.get(0);

View File

@ -55,7 +55,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
/**
 * Verifies that the client-supplied _typeFilter does not set the patient search parameter
 * that this reader manages itself.
 *
 * @param expandedSpMap the search parameter map built from the client's _typeFilter
 * @return the patient-compartment search parameter for the current resource type
 * @throws IllegalArgumentException if the _typeFilter already populates that parameter
 */
private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) {
	RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
	if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
		// Diff residue removed: only the "Patient Bulk Export" message belongs here; the stale
		// duplicate "Group Bulk Export" throw was unreachable dead code.
		throw new IllegalArgumentException(String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
	}
	return runtimeSearchParam;
}
@ -74,7 +74,6 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
List<SearchParameterMap> maps = createSearchParameterMapsForResourceType();
String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName();
for (SearchParameterMap map: maps) {
//Ensure users did not monkey with the patient compartment search parameter.
validateSearchParameters(map);

View File

@ -126,6 +126,7 @@ public class BulkDataExportProvider {
@OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
ServletRequestDetails theRequestDetails
) {
myBulkDataExportSvc.getPatientCompartmentResources();
validatePreferAsyncHeader(theRequestDetails);
BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions);

View File

@ -21,6 +21,9 @@ package ca.uhn.fhir.jpa.bulk.svc;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
@ -40,12 +43,16 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobExecutionContext;
@ -64,13 +71,16 @@ import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -112,6 +122,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
@Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myPatientBulkExportJob;
private Set<String> myCompartmentResources;
private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR);
/**
@ -291,9 +303,9 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
requestBuilder.append("/");
//Prefix the export url with Group/[id]/ or /Patient/ depending on what type of request it is.
if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) {
if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
requestBuilder.append(theBulkDataExportOptions.getGroupId().toVersionless()).append("/");
} else if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.PATIENT)) {
} else if (theBulkDataExportOptions.getExportStyle().equals(PATIENT)) {
requestBuilder.append("Patient/");
}
@ -313,7 +325,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
.forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter)));
}
if (theBulkDataExportOptions.getExportStyle().equals(BulkDataExportOptions.ExportStyle.GROUP)) {
if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(theBulkDataExportOptions.getGroupId().getValue());
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(theBulkDataExportOptions.isExpandMdm());
}
@ -338,7 +350,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
// This is probably not a useful default, but having the default be "download the whole
// server" seems like a risky default too. We'll deal with that by having the default involve
// only returning a small time span
resourceTypes = myContext.getResourceTypes();
resourceTypes = getAllowedResourceTypesForBulkExportStyle(theBulkDataExportOptions.getExportStyle());
if (since == null) {
since = DateUtils.addDays(new Date(), -1);
}
@ -437,6 +449,35 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
return retVal;
}
/**
 * Returns the set of resource types whose "Patient" compartment contains at least one
 * search parameter. The result is computed once and cached for subsequent calls.
 */
@Override
public Set<String> getPatientCompartmentResources() {
	// Lazy initialization; recomputation is idempotent, so an unsynchronized benign race
	// at worst repeats the same deterministic work.
	if (myCompartmentResources == null) {
		Set<String> compartmentResourceTypes = new HashSet<>();
		for (String resourceType : myContext.getResourceTypes()) {
			if (resourceTypeIsInPatientCompartment(resourceType)) {
				compartmentResourceTypes.add(resourceType);
			}
		}
		myCompartmentResources = compartmentResourceTypes;
	}
	return myCompartmentResources;
}
/**
 * Return true if any search parameter in the resource can point at a patient, false otherwise
 */
private boolean resourceTypeIsInPatientCompartment(String theResourceType) {
	RuntimeResourceDefinition resourceDefinition = myContext.getResourceDefinition(theResourceType);
	List<RuntimeSearchParam> patientCompartmentParams = resourceDefinition.getSearchParamsForCompartmentName("Patient");
	return patientCompartmentParams != null && !patientCompartmentParams.isEmpty();
}
/**
 * Returns the resource types a bulk export of the given style is permitted to include.
 *
 * @param theExportStyle SYSTEM exports may touch every known resource type; GROUP and
 *                       PATIENT exports are restricted to patient-compartment types
 * @return the allowed resource type names for the given style
 * @throws IllegalArgumentException if the export style is not recognized
 */
public Set<String> getAllowedResourceTypesForBulkExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
	// Enum constants are singletons, so identity comparison is the idiomatic (and null-safe) check.
	if (theExportStyle == SYSTEM) {
		return myContext.getResourceTypes();
	} else if (theExportStyle == GROUP || theExportStyle == PATIENT) {
		return getPatientCompartmentResources();
	} else {
		// Previously returned null, which merely deferred an NPE to the caller; fail fast instead.
		throw new IllegalArgumentException("Unsupported export style: " + theExportStyle);
	}
}
private IIdType toId(String theResourceId) {
IIdType retVal = myContext.getVersion().newIdType();
retVal.setValue(theResourceId);

View File

@ -936,17 +936,42 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
}
@Test
public void testPatientExportWithNoTypesWorks() {
public void testAllExportStylesWorkWithNullResourceTypes() {
createResources();
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
// Create a bulk job
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setOutputFormat(null);
bulkDataExportOptions.setResourceTypes(Sets.newHashSet(myFhirCtx.getResourceTypes());
bulkDataExportOptions.setResourceTypes(null);
bulkDataExportOptions.setSince(null);
bulkDataExportOptions.setFilters(null);
bulkDataExportOptions.setGroupId(myPatientGroupId);
bulkDataExportOptions.setExpandMdm(true);
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
//Patient-style
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
//Group-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
bulkDataExportOptions.setGroupId(myPatientGroupId);
jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
//System-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
}
private void awaitJobCompletion(JobExecution theJobExecution) {