6530 refactor export operation (#6531)

Extract reusable methods out of the bulk export provider

---------

Co-authored-by: Vadim Karantayer <vadim.karantayer@smilecdr.com>
Aditya Dave authored on 2024-12-12 13:58:55 -05:00, committed by GitHub
parent b904aac3c9
commit 2d740f8d85
16 changed files with 957 additions and 376 deletions
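In short, this change moves the provider's private parameter-building and job-start plumbing into a reusable BulkExportJobParametersBuilder plus the extracted BulkDataExportSupport, BulkDataExportUtil, and BulkExportJobService classes. A condensed before/after sketch of the system-level $export path, using only names that appear in the diff below:

// Before: parameters were assembled by a private helper and the job was started inline.
BulkExportJobParameters options = buildSystemBulkExportOptions(
        theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
startJob(theRequestDetails, options);

// After: a reusable builder plus the extracted BulkExportJobService.
BulkExportJobParameters bulkExportJobParameters = new BulkExportJobParametersBuilder()
        .outputFormat(theOutputFormat)
        .resourceTypes(theType)
        .since(theSince)
        .filters(theTypeFilter)
        .exportIdentifier(theExportId)
        .exportStyle(ExportStyle.SYSTEM)
        .postFetchFilterUrl(theTypePostFetchFilterUrl)
        .build();
getBulkDataExportJobService().startJob(theRequestDetails, bulkExportJobParameters);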


@@ -21,11 +21,15 @@ package ca.uhn.fhir.util;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class DatatypeUtil {
private DatatypeUtil() {
// non-instantiable
}
/**
* Convert a list of FHIR String objects to a set of native java Strings
@@ -74,4 +78,12 @@ public class DatatypeUtil {
public static Boolean toBooleanValue(IPrimitiveType<Boolean> thePrimitiveType) {
return thePrimitiveType != null ? thePrimitiveType.getValue() : null;
}
/**
* Returns {@link IPrimitiveType#getValue()} if <code>thePrimitiveType</code> is
* not null, else returns null.
*/
public static Date toDateValue(IPrimitiveType<Date> thePrimitiveType) {
return thePrimitiveType != null ? thePrimitiveType.getValue() : null;
}
}
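As a usage note, the new toDateValue helper mirrors the existing toStringValue and toBooleanValue helpers; a minimal sketch (DateDt is the HAPI date primitive used by the tests further down in this diff):

IPrimitiveType<Date> since = new DateDt(new Date());
Date value = DatatypeUtil.toDateValue(since); // unwraps to the java.util.Date
Date absent = DatatypeUtil.toDateValue(null); // null-safe: returns null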


@@ -442,12 +442,12 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
*/
@SuppressWarnings("unchecked")
private List<JpaPid> getMembersFromGroupWithFilter(
ExportPIDIteratorParameters theParameters, boolean theConsiderSince) throws IOException {
ExportPIDIteratorParameters theParameters, boolean theConsiderDateRange) throws IOException {
RuntimeResourceDefinition def = myContext.getResourceDefinition("Patient");
List<JpaPid> resPids = new ArrayList<>();
List<SearchParameterMap> maps =
myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParameters, theConsiderSince);
List<SearchParameterMap> maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(
def, theParameters, theConsiderDateRange);
maps.forEach(map -> addMembershipToGroupClause(map, theParameters.getGroupId()));


@@ -0,0 +1,84 @@
package ca.uhn.fhir.jpa.bulk.export.svc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.util.DateUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Date;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verifyNoMoreInteractions;
@ExtendWith(MockitoExtension.class)
class BulkExportHelperServiceTest {
@Mock
private MatchUrlService myMatchUrlService;
@Mock
private FhirContext myContext;
@InjectMocks
private BulkExportHelperService myFixture;
@AfterEach
void tearDown() {
verifyNoMoreInteractions(myMatchUrlService, myContext);
}
@Test
void addLastUpdatedFilterShouldBeNullWhenNoDatesGiven() {
// Arrange
final SearchParameterMap searchParameterMap = new SearchParameterMap();
// Act
myFixture.addLastUpdatedFilter(searchParameterMap, null, null);
// Assert
assertThat(searchParameterMap.getLastUpdated()).isNull();
}
@Test
void addLastUpdatedFilterShouldContainStartDateWhenStartDateIsGiven() {
// Arrange
final SearchParameterMap searchParameterMap = new SearchParameterMap();
final Date startDate = new Date();
final DateRangeParam expected = new DateRangeParam(startDate, null);
// Act
myFixture.addLastUpdatedFilter(searchParameterMap, startDate, null);
// Assert
assertThat(searchParameterMap.getLastUpdated()).isEqualTo(expected);
}
@Test
void addLastUpdatedFilterShouldContainEndDateWhenEndDateIsGiven() {
// Arrange
final SearchParameterMap searchParameterMap = new SearchParameterMap();
final Date endDate = new Date();
final DateRangeParam expected = new DateRangeParam(null, endDate);
// Act
myFixture.addLastUpdatedFilter(searchParameterMap, null, endDate);
// Assert
assertThat(searchParameterMap.getLastUpdated()).isEqualTo(expected);
}
@Test
void addLastUpdatedFilterShouldContainDateRangeWhenStartAndEndDatesAreGiven() {
// Arrange
final SearchParameterMap searchParameterMap = new SearchParameterMap();
final Date startDate = new Date();
final Date endDate = DateUtils.getEndOfDay(startDate);
final DateRangeParam expected = new DateRangeParam(startDate, endDate);
// Act
myFixture.addLastUpdatedFilter(searchParameterMap, startDate, endDate);
// Assert
assertThat(searchParameterMap.getLastUpdated()).isEqualTo(expected);
}
}


@@ -41,14 +41,21 @@ public class BulkExportJobParameters extends BaseBatchJobParameters {
private List<String> myResourceTypes;
/**
* The start date from when we should start
* doing the export. (end date is assumed to be "now")
* The start date from when we should start doing the export.
*/
@JsonSerialize(using = JsonDateSerializer.class)
@JsonDeserialize(using = JsonDateDeserializer.class)
@JsonProperty("since")
private Date mySince;
/**
* The end date to which we should stop doing the export.
*/
@JsonSerialize(using = JsonDateSerializer.class)
@JsonDeserialize(using = JsonDateDeserializer.class)
@JsonProperty("until")
private Date myUntil;
@JsonProperty("exportId")
private String myExportId;
@@ -147,6 +154,14 @@ public class BulkExportJobParameters extends BaseBatchJobParameters {
mySince = theSince;
}
public Date getUntil() {
return myUntil;
}
public void setUntil(Date theUntil) {
myUntil = theUntil;
}
public List<String> getFilters() {
if (myFilters == null) {
myFilters = new ArrayList<>();

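For context, a short sketch of how the new field travels in the serialized job parameters; the property names come from the @JsonProperty annotations above, and JsonUtil is the HAPI JSON helper already imported by the provider elsewhere in this diff:

Date start = new Date(0L);
Date end = new Date();
BulkExportJobParameters params = new BulkExportJobParameters();
params.setSince(start); // serialized as "since"
params.setUntil(end); // serialized as "until"
String json = JsonUtil.serialize(params);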

@@ -22,19 +22,14 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.JobOperationResultJson;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
@@ -42,28 +37,16 @@ import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ArrayUtil;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -75,44 +58,23 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters.ExportStyle;
import static ca.uhn.fhir.util.DatatypeUtil.toStringValue;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.slf4j.LoggerFactory.getLogger;
public class BulkDataExportProvider {
public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)";
public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES =
List.of("Practitioner", "Organization");
/**
* Bulk data $export does not include the Binary type
*/
public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
private static final Logger ourLog = getLogger(BulkDataExportProvider.class);
private static final Set<FhirVersionEnum> PATIENT_COMPARTMENT_FHIR_VERSIONS_SUPPORT_DEVICE = Set.of(
FhirVersionEnum.DSTU2,
FhirVersionEnum.DSTU2_1,
FhirVersionEnum.DSTU2_HL7ORG,
FhirVersionEnum.DSTU3,
FhirVersionEnum.R4,
FhirVersionEnum.R4B);
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
private Set<String> myCompartmentResources;
@Autowired
private FhirContext myFhirContext;
@@ -167,85 +129,19 @@ public class BulkDataExportProvider {
IPrimitiveType<String> theExportId,
ServletRequestDetails theRequestDetails) {
// JPA export provider
validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
BulkExportJobParameters BulkExportJobParameters = buildSystemBulkExportOptions(
theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
BulkExportJobParameters bulkExportJobParameters = new BulkExportJobParametersBuilder()
.outputFormat(theOutputFormat)
.resourceTypes(theType)
.since(theSince)
.filters(theTypeFilter)
.exportIdentifier(theExportId)
.exportStyle(ExportStyle.SYSTEM)
.postFetchFilterUrl(theTypePostFetchFilterUrl)
.build();
startJob(theRequestDetails, BulkExportJobParameters);
}
private void startJob(ServletRequestDetails theRequestDetails, BulkExportJobParameters theOptions) {
// parameter massaging
expandParameters(theRequestDetails, theOptions);
// permission check
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_INITIATE_BULK_EXPORT)) {
HookParams initiateBulkExportHookParams = (new HookParams())
.add(BulkExportJobParameters.class, theOptions)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
compositeBroadcaster.callHooks(Pointcut.STORAGE_INITIATE_BULK_EXPORT, initiateBulkExportHookParams);
}
// get cache boolean
boolean useCache = shouldUseCache(theRequestDetails);
// start job
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setParameters(theOptions);
startRequest.setUseCache(useCache);
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest);
writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId());
}
/**
* This method changes any parameters (limiting the _type parameter, for instance)
* so that later steps in the export do not have to handle them.
*/
private void expandParameters(ServletRequestDetails theRequestDetails, BulkExportJobParameters theOptions) {
// Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and
// is needed for the report.
theOptions.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
// If no _type parameter is provided, default to all resource types except Binary
if (theOptions.getResourceTypes().isEmpty()) {
List<String> resourceTypes = new ArrayList<>(myDaoRegistry.getRegisteredDaoTypes());
resourceTypes.remove(UNSUPPORTED_BINARY_TYPE);
theOptions.setResourceTypes(resourceTypes);
}
// Determine and validate partition permissions (if needed).
RequestPartitionId partitionId =
myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
theRequestDetails, ProviderConstants.OPERATION_EXPORT);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
theOptions.setPartitionId(partitionId);
// call hook so any other parameter manipulation can be done
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT)) {
HookParams preInitiateBulkExportHookParams = new HookParams();
preInitiateBulkExportHookParams.add(BulkExportJobParameters.class, theOptions);
preInitiateBulkExportHookParams.add(RequestDetails.class, theRequestDetails);
preInitiateBulkExportHookParams.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT, preInitiateBulkExportHookParams);
}
}
private boolean shouldUseCache(ServletRequestDetails theRequestDetails) {
CacheControlDirective cacheControlDirective =
new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));
return myStorageSettings.getEnableBulkExportJobReuse() && !cacheControlDirective.isNoCache();
}
private String getServerBase(ServletRequestDetails theRequestDetails) {
return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
getBulkDataExportJobService().startJob(theRequestDetails, bulkExportJobParameters);
}
/**
@@ -288,87 +184,30 @@ public class BulkDataExportProvider {
ourLog.debug("_typeFilter={}", theTypeFilter);
ourLog.debug("_mdm={}", theMdm);
validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
// verify the Group exists before starting the job
validateTargetsExists(theRequestDetails, "Group", List.of(theIdParam));
getBulkDataExportSupport().validateTargetsExists(theRequestDetails, "Group", List.of(theIdParam));
BulkExportJobParameters BulkExportJobParameters = buildGroupBulkExportOptions(
theOutputFormat,
theType,
theSince,
theTypeFilter,
theIdParam,
theMdm,
theExportIdentifier,
theTypePostFetchFilterUrl);
final BulkExportJobParameters bulkExportJobParameters = new BulkExportJobParametersBuilder()
.outputFormat(theOutputFormat)
.resourceTypes(theType)
.since(theSince)
.filters(theTypeFilter)
.exportIdentifier(theExportIdentifier)
.exportStyle(ExportStyle.GROUP)
.postFetchFilterUrl(theTypePostFetchFilterUrl)
.groupId(theIdParam)
.expandMdm(theMdm)
.build();
if (isNotEmpty(BulkExportJobParameters.getResourceTypes())) {
validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes());
} else {
// all patient resource types
Set<String> groupTypes = new HashSet<>(getPatientCompartmentResources());
// Add the forward reference resource types from the patients, e.g. Practitioner, Organization
groupTypes.addAll(PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES);
groupTypes.removeIf(t -> !myDaoRegistry.isResourceTypeSupported(t));
BulkExportJobParameters.setResourceTypes(groupTypes);
}
startJob(theRequestDetails, BulkExportJobParameters);
}
/**
* Throw ResourceNotFound if the target resources don't exist.
* Otherwise, we start a bulk-export job which then fails, reporting a 500.
*
* @param theRequestDetails the caller details
* @param theTargetResourceName the type of the target
* @param theIdParams the id(s) to verify exist
*/
private void validateTargetsExists(
RequestDetails theRequestDetails, String theTargetResourceName, Iterable<IIdType> theIdParams) {
if (theIdParams.iterator().hasNext()) {
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
theRequestDetails,
theTargetResourceName,
theIdParams.iterator().next());
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
for (IIdType nextId : theIdParams) {
myDaoRegistry.getResourceDao(theTargetResourceName).read(nextId, requestDetails);
}
}
}
private void validateResourceTypesAllContainPatientSearchParams(Collection<String> theResourceTypes) {
if (theResourceTypes != null) {
List<String> badResourceTypes = theResourceTypes.stream()
.filter(resourceType ->
!PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType))
.filter(resourceType -> !getPatientCompartmentResources().contains(resourceType))
.collect(Collectors.toList());
if (!badResourceTypes.isEmpty()) {
throw new InvalidRequestException(Msg.code(512)
+ String.format(
"Resource types [%s] are invalid for this type of export, as they do not contain search parameters that refer to patients.",
String.join(",", badResourceTypes)));
}
}
}
private Set<String> getPatientCompartmentResources() {
return getPatientCompartmentResources(myFhirContext);
getBulkDataExportSupport().validateOrDefaultResourceTypesForGroupBulkExport(bulkExportJobParameters);
getBulkDataExportJobService().startJob(theRequestDetails, bulkExportJobParameters);
}
@VisibleForTesting
Set<String> getPatientCompartmentResources(FhirContext theFhirContext) {
if (myCompartmentResources == null) {
myCompartmentResources =
new HashSet<>(SearchParameterUtil.getAllResourceTypesThatAreInPatientCompartment(theFhirContext));
}
return myCompartmentResources;
return getBulkDataExportSupport().getPatientCompartmentResources(theFhirContext);
}
/**
@@ -475,24 +314,36 @@ public class BulkDataExportProvider {
List<IPrimitiveType<String>> theTypeFilter,
List<IPrimitiveType<String>> theTypePostFetchFilterUrl,
List<IPrimitiveType<String>> thePatientIds) {
validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, ProviderConstants.OPERATION_EXPORT);
validateTargetsExists(
theRequestDetails,
"Patient",
thePatientIds.stream().map(c -> new IdType(c.getValue())).collect(Collectors.toList()));
getBulkDataExportSupport()
.validateTargetsExists(
theRequestDetails,
"Patient",
thePatientIds.stream()
.map(c -> new IdType(c.getValue()))
.collect(Collectors.toList()));
BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions(
theOutputFormat,
theType,
theSince,
theTypeFilter,
theExportIdentifier,
thePatientIds,
theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes());
// set resourceTypes to all patient compartment resources if it is null
IPrimitiveType<String> resourceTypes = theType == null
? new StringDt(String.join(",", getBulkDataExportSupport().getPatientCompartmentResources()))
: theType;
startJob(theRequestDetails, BulkExportJobParameters);
BulkExportJobParameters bulkExportJobParameters = new BulkExportJobParametersBuilder()
.outputFormat(theOutputFormat)
.resourceTypes(resourceTypes)
.since(theSince)
.filters(theTypeFilter)
.exportIdentifier(theExportIdentifier)
.exportStyle(ExportStyle.PATIENT)
.postFetchFilterUrl(theTypePostFetchFilterUrl)
.patientIds(thePatientIds)
.build();
getBulkDataExportSupport()
.validateResourceTypesAllContainPatientSearchParams(bulkExportJobParameters.getResourceTypes());
getBulkDataExportJobService().startJob(theRequestDetails, bulkExportJobParameters);
}
/**
@@ -563,7 +414,7 @@ public class BulkDataExportProvider {
bulkResponseDocument.setMsg(results.getReportMsg());
bulkResponseDocument.setRequest(results.getOriginalRequestUrl());
String serverBase = getServerBase(theRequestDetails);
String serverBase = BulkDataExportUtil.getServerBase(theRequestDetails);
// an output is required, even if empty, according to HL7 FHIR IG
bulkResponseDocument.getOutput();
@@ -658,153 +509,6 @@ public class BulkDataExportProvider {
}
}
private BulkExportJobParameters buildSystemBulkExportOptions(
IPrimitiveType<String> theOutputFormat,
IPrimitiveType<String> theType,
IPrimitiveType<Date> theSince,
List<IPrimitiveType<String>> theTypeFilter,
IPrimitiveType<String> theExportId,
List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
return buildBulkExportJobParameters(
theOutputFormat,
theType,
theSince,
theTypeFilter,
theExportId,
BulkExportJobParameters.ExportStyle.SYSTEM,
theTypePostFetchFilterUrl);
}
private BulkExportJobParameters buildGroupBulkExportOptions(
IPrimitiveType<String> theOutputFormat,
IPrimitiveType<String> theType,
IPrimitiveType<Date> theSince,
List<IPrimitiveType<String>> theTypeFilter,
IIdType theGroupId,
IPrimitiveType<Boolean> theExpandMdm,
IPrimitiveType<String> theExportId,
List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(
theOutputFormat,
theType,
theSince,
theTypeFilter,
theExportId,
ExportStyle.GROUP,
theTypePostFetchFilterUrl);
BulkExportJobParameters.setGroupId(toStringValue(theGroupId));
boolean mdm = false;
if (theExpandMdm != null) {
mdm = theExpandMdm.getValue();
}
BulkExportJobParameters.setExpandMdm(mdm);
return BulkExportJobParameters;
}
private BulkExportJobParameters buildPatientBulkExportOptions(
IPrimitiveType<String> theOutputFormat,
IPrimitiveType<String> theType,
IPrimitiveType<Date> theSince,
List<IPrimitiveType<String>> theTypeFilter,
IPrimitiveType<String> theExportIdentifier,
List<IPrimitiveType<String>> thePatientIds,
List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
IPrimitiveType<String> type = theType;
if (type == null) {
// set type to all patient compartment resources if it is null
type = new StringDt(String.join(",", getPatientCompartmentResources()));
}
BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(
theOutputFormat,
type,
theSince,
theTypeFilter,
theExportIdentifier,
ExportStyle.PATIENT,
theTypePostFetchFilterUrl);
if (thePatientIds != null) {
BulkExportJobParameters.setPatientIds(
thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet()));
}
return BulkExportJobParameters;
}
private BulkExportJobParameters buildBulkExportJobParameters(
IPrimitiveType<String> theOutputFormat,
IPrimitiveType<String> theType,
IPrimitiveType<Date> theSince,
List<IPrimitiveType<String>> theTypeFilter,
IPrimitiveType<String> theExportIdentifier,
BulkExportJobParameters.ExportStyle theExportStyle,
List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON;
Set<String> resourceTypes = null;
if (theType != null) {
resourceTypes = ArrayUtil.commaSeparatedListToCleanSet(theType.getValueAsString());
}
Date since = null;
if (theSince != null) {
since = theSince.getValue();
}
String exportIdentifier = null;
if (theExportIdentifier != null) {
exportIdentifier = theExportIdentifier.getValueAsString();
}
Set<String> typeFilters = splitTypeFilters(theTypeFilter);
Set<String> typePostFetchFilterUrls = splitTypeFilters(theTypePostFetchFilterUrl);
BulkExportJobParameters BulkExportJobParameters = new BulkExportJobParameters();
BulkExportJobParameters.setFilters(typeFilters);
BulkExportJobParameters.setPostFetchFilterUrls(typePostFetchFilterUrls);
BulkExportJobParameters.setExportStyle(theExportStyle);
BulkExportJobParameters.setExportIdentifier(exportIdentifier);
BulkExportJobParameters.setSince(since);
BulkExportJobParameters.setResourceTypes(resourceTypes);
BulkExportJobParameters.setOutputFormat(outputFormat);
return BulkExportJobParameters;
}
public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, String theInstanceId) {
String serverBase = getServerBase(theRequestDetails);
if (serverBase == null) {
throw new InternalErrorException(Msg.code(2136) + "Unable to get the server base.");
}
String pollLocation = serverBase + "/" + ProviderConstants.OPERATION_EXPORT_POLL_STATUS + "?"
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId;
pollLocation = UrlUtil.sanitizeHeaderValue(pollLocation);
HttpServletResponse response = theRequestDetails.getServletResponse();
// Add standard headers
theRequestDetails.getServer().addHeadersToResponse(response);
// Successful 202 Accepted
response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation);
response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
}
private Set<String> splitTypeFilters(List<IPrimitiveType<String>> theTypeFilter) {
if (theTypeFilter == null) {
return null;
}
Set<String> retVal = new HashSet<>();
for (IPrimitiveType<String> next : theTypeFilter) {
String typeFilterString = next.getValueAsString();
Arrays.stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX))
.filter(StringUtils::isNotBlank)
.forEach(retVal::add);
}
return retVal;
}
@VisibleForTesting
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
@@ -815,11 +519,29 @@ public class BulkDataExportProvider {
myDaoRegistry = theDaoRegistry;
}
public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
if (!prefer.getRespondAsync()) {
throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
// Do not use this variable directly, use getBulkDataExportJobService() instead
private BulkExportJobService myBulkExportJobService;
private BulkExportJobService getBulkDataExportJobService() {
if (myBulkExportJobService == null) {
myBulkExportJobService = new BulkExportJobService(
myInterceptorBroadcaster,
myJobCoordinator,
myDaoRegistry,
myRequestPartitionHelperService,
myStorageSettings);
}
return myBulkExportJobService;
}
// Do not use this variable directly, use getBulkDataExportSupport() instead
private BulkDataExportSupport myBulkDataExportSupport;
private BulkDataExportSupport getBulkDataExportSupport() {
if (myBulkDataExportSupport == null) {
myBulkDataExportSupport =
new BulkDataExportSupport(myFhirContext, myDaoRegistry, myRequestPartitionHelperService);
}
return myBulkDataExportSupport;
}
}


@@ -0,0 +1,131 @@
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.SearchParameterUtil;
import jakarta.annotation.Nonnull;
import org.apache.commons.collections4.CollectionUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* This class is responsible for validating that a target exists
* and that the requested export resource types contain search references to Patient.
* It also defaults the resource types in BulkExportJobParameters to the
* Patient compartment resource types when no export resource types are provided.
*/
public class BulkDataExportSupport {
private final FhirContext myFhirContext;
private final DaoRegistry myDaoRegistry;
private final IRequestPartitionHelperSvc myRequestPartitionHelperService;
private Set<String> myCompartmentResources;
public BulkDataExportSupport(
@Nonnull FhirContext theFhirContext,
@Nonnull DaoRegistry theDaoRegistry,
@Nonnull IRequestPartitionHelperSvc theRequestPartitionHelperService) {
myFhirContext = theFhirContext;
myDaoRegistry = theDaoRegistry;
myRequestPartitionHelperService = theRequestPartitionHelperService;
}
/**
* Throw ResourceNotFound if the target resources don't exist.
*
* @param theRequestDetails the caller details
* @param theTargetResourceName the type of the target
* @param theIdParams the id(s) to verify exist
*/
public void validateTargetsExists(
@Nonnull RequestDetails theRequestDetails,
@Nonnull String theTargetResourceName,
@Nonnull Iterable<IIdType> theIdParams) {
if (theIdParams.iterator().hasNext()) {
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
theRequestDetails,
theTargetResourceName,
theIdParams.iterator().next());
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
for (IIdType nextId : theIdParams) {
myDaoRegistry.getResourceDao(theTargetResourceName).read(nextId, requestDetails);
}
}
}
public void validateOrDefaultResourceTypesForGroupBulkExport(
@Nonnull BulkExportJobParameters theBulkExportJobParameters) {
if (CollectionUtils.isNotEmpty(theBulkExportJobParameters.getResourceTypes())) {
validateResourceTypesAllContainPatientSearchParams(theBulkExportJobParameters.getResourceTypes());
} else {
// all patient resource types
Set<String> groupTypes = new HashSet<>(getPatientCompartmentResources());
// Add the forward reference resource types from the patients, e.g. Practitioner, Organization
groupTypes.addAll(BulkDataExportUtil.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES);
groupTypes.removeIf(t -> !myDaoRegistry.isResourceTypeSupported(t));
theBulkExportJobParameters.setResourceTypes(groupTypes);
}
}
public void validateResourceTypesAllContainPatientSearchParams(Collection<String> theResourceTypes) {
if (theResourceTypes != null) {
List<String> badResourceTypes = theResourceTypes.stream()
.filter(resourceType ->
!BulkDataExportUtil.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(
resourceType))
.filter(resourceType -> !getPatientCompartmentResources().contains(resourceType))
.collect(Collectors.toList());
if (!badResourceTypes.isEmpty()) {
throw new InvalidRequestException(Msg.code(512)
+ String.format(
"Resource types [%s] are invalid for this type of export, as they do not contain search parameters that refer to patients.",
String.join(",", badResourceTypes)));
}
}
}
public Set<String> getPatientCompartmentResources() {
return getPatientCompartmentResources(myFhirContext);
}
Set<String> getPatientCompartmentResources(FhirContext theFhirContext) {
if (myCompartmentResources == null) {
myCompartmentResources =
new HashSet<>(SearchParameterUtil.getAllResourceTypesThatAreInPatientCompartment(theFhirContext));
}
return myCompartmentResources;
}
}
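To make the validation concrete, a minimal sketch, assuming theSupport is a wired-up instance and that StructureDefinition is not in the Patient compartment for the FHIR version in use (the resource type names here are illustrative):

void illustrateValidation(BulkDataExportSupport theSupport) {
	// Passes: Observation is in the Patient compartment, and Organization is an
	// allowed forward-reference type.
	theSupport.validateResourceTypesAllContainPatientSearchParams(List.of("Observation", "Organization"));
	// Throws InvalidRequestException (Msg.code 512) naming StructureDefinition.
	theSupport.validateResourceTypesAllContainPatientSearchParams(List.of("StructureDefinition"));
}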


@@ -0,0 +1,37 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
public class BulkDataExportUtil {
public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES =
List.of("Practitioner", "Organization");
/**
* Bulk data $export does not include the Binary type
*/
public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
private BulkDataExportUtil() {
// non-instantiable
}
public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
if (!prefer.getRespondAsync()) {
throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
}
}
public static String getServerBase(ServletRequestDetails theRequestDetails) {
return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
}
}


@@ -0,0 +1,135 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.util.ArrayUtil;
import ca.uhn.fhir.util.DatatypeUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A builder class that helps with constructing a
* BulkExportJobParameters object.
*/
public class BulkExportJobParametersBuilder {
public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)";
private Set<String> myResourceTypes;
private Date mySince;
private Date myUntil;
private Set<String> myFilters;
private String myOutputFormat;
private BulkExportJobParameters.ExportStyle myExportStyle;
private List<String> myPatientIds = new ArrayList<>();
private String myGroupId;
private boolean myExpandMdm;
private RequestPartitionId myPartitionId;
private String myExportIdentifier;
private Set<String> myPostFetchFilterUrls;
public BulkExportJobParametersBuilder resourceTypes(IPrimitiveType<String> theResourceTypes) {
myResourceTypes = theResourceTypes == null
? null
: ArrayUtil.commaSeparatedListToCleanSet(theResourceTypes.getValueAsString());
return this;
}
public BulkExportJobParametersBuilder since(IPrimitiveType<Date> theSince) {
mySince = DatatypeUtil.toDateValue(theSince);
return this;
}
public BulkExportJobParametersBuilder until(IPrimitiveType<Date> theUntil) {
myUntil = DatatypeUtil.toDateValue(theUntil);
return this;
}
public BulkExportJobParametersBuilder filters(List<IPrimitiveType<String>> theFilters) {
myFilters = parseFilters(theFilters);
return this;
}
public BulkExportJobParametersBuilder outputFormat(IPrimitiveType<String> theOutputFormat) {
myOutputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON;
return this;
}
public BulkExportJobParametersBuilder exportStyle(BulkExportJobParameters.ExportStyle theExportStyle) {
myExportStyle = theExportStyle;
return this;
}
public BulkExportJobParametersBuilder patientIds(List<IPrimitiveType<String>> thePatientIds) {
myPatientIds = thePatientIds == null
? null
: thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
return this;
}
public BulkExportJobParametersBuilder groupId(IIdType theGroupId) {
myGroupId = DatatypeUtil.toStringValue(theGroupId);
return this;
}
public BulkExportJobParametersBuilder expandMdm(IPrimitiveType<Boolean> theExpandMdm) {
final Boolean booleanValue = DatatypeUtil.toBooleanValue(theExpandMdm);
myExpandMdm = booleanValue != null && booleanValue;
return this;
}
public BulkExportJobParametersBuilder partitionId(RequestPartitionId thePartitionId) {
myPartitionId = thePartitionId;
return this;
}
public BulkExportJobParametersBuilder exportIdentifier(IPrimitiveType<String> theExportIdentifier) {
myExportIdentifier = DatatypeUtil.toStringValue(theExportIdentifier);
return this;
}
public BulkExportJobParametersBuilder postFetchFilterUrl(List<IPrimitiveType<String>> thePostFetchFilterUrl) {
myPostFetchFilterUrls = parseFilters(thePostFetchFilterUrl);
return this;
}
public BulkExportJobParameters build() {
BulkExportJobParameters result = new BulkExportJobParameters();
result.setExpandMdm(myExpandMdm);
result.setExportIdentifier(myExportIdentifier);
result.setExportStyle(myExportStyle);
result.setFilters(myFilters);
result.setGroupId(myGroupId);
result.setOutputFormat(myOutputFormat);
result.setPartitionId(myPartitionId);
result.setPatientIds(myPatientIds);
result.setResourceTypes(myResourceTypes);
result.setSince(mySince);
result.setUntil(myUntil);
result.setPostFetchFilterUrls(myPostFetchFilterUrls);
return result;
}
private Set<String> parseFilters(List<IPrimitiveType<String>> theFilters) {
Set<String> retVal = null;
if (theFilters != null) {
retVal = new HashSet<>();
for (IPrimitiveType<String> next : theFilters) {
String typeFilterString = next.getValueAsString();
Arrays.stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX))
.filter(StringUtils::isNotBlank)
.forEach(retVal::add);
}
}
return retVal;
}
}
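Each builder method tolerates a null argument, mirroring the optional operation parameters; a quick sketch (StringDt and DateDt are the primitives used by the tests below):

BulkExportJobParameters params = new BulkExportJobParametersBuilder()
        .exportStyle(BulkExportJobParameters.ExportStyle.PATIENT)
        .resourceTypes(new StringDt("Patient,Observation"))
        .since(new DateDt(new Date()))
        .outputFormat(null) // null falls back to Constants.CT_FHIR_NDJSON
        .build();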


@@ -40,9 +40,9 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
public class BulkExportJobParametersValidator implements IJobParametersValidator<BulkExportJobParameters> {
/** @deprecated use BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE instead */
/** @deprecated use BulkDataExportUtil.UNSUPPORTED_BINARY_TYPE instead */
@Deprecated(since = "6.3.10")
public static final String UNSUPPORTED_BINARY_TYPE = BulkDataExportProvider.UNSUPPORTED_BINARY_TYPE;
public static final String UNSUPPORTED_BINARY_TYPE = BulkDataExportUtil.UNSUPPORTED_BINARY_TYPE;
@Autowired
private DaoRegistry myDaoRegistry;
@@ -62,7 +62,7 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
List<String> resourceTypes = theParameters.getResourceTypes();
if (resourceTypes != null && !resourceTypes.isEmpty()) {
for (String resourceType : theParameters.getResourceTypes()) {
if (resourceType.equalsIgnoreCase(UNSUPPORTED_BINARY_TYPE)) {
if (resourceType.equalsIgnoreCase(BulkDataExportUtil.UNSUPPORTED_BINARY_TYPE)) {
errorMsgs.add("Bulk export of Binary resources is forbidden");
} else if (!myDaoRegistry.isResourceTypeSupported(resourceType)) {
errorMsgs.add("Resource type " + resourceType + " is not a supported resource type!");


@@ -0,0 +1,165 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.UrlUtil;
import jakarta.annotation.Nonnull;
import jakarta.servlet.http.HttpServletResponse;
import java.util.ArrayList;
import java.util.List;
/**
* This class is responsible for initiating a bulk export job
* with the appropriate _type parameter and partitionId, as well as
* generating the response for the request, which includes the polling location.
* It also calls hooks that can update the BulkExportJobParameters and the incoming request.
*/
public class BulkExportJobService {
private final IInterceptorBroadcaster myInterceptorBroadcaster;
private final IJobCoordinator myJobCoordinator;
private final DaoRegistry myDaoRegistry;
private final IRequestPartitionHelperSvc myRequestPartitionHelperService;
private final JpaStorageSettings myStorageSettings;
public BulkExportJobService(
@Nonnull IInterceptorBroadcaster theInterceptorBroadcaster,
@Nonnull IJobCoordinator theJobCoordinator,
@Nonnull DaoRegistry theDaoRegistry,
@Nonnull IRequestPartitionHelperSvc theRequestPartitionHelperService,
@Nonnull JpaStorageSettings theStorageSettings) {
myInterceptorBroadcaster = theInterceptorBroadcaster;
myJobCoordinator = theJobCoordinator;
myDaoRegistry = theDaoRegistry;
myRequestPartitionHelperService = theRequestPartitionHelperService;
myStorageSettings = theStorageSettings;
}
/**
* Starts the bulk export job with the appropriate parameters.
*/
public void startJob(
@Nonnull ServletRequestDetails theRequestDetails,
@Nonnull BulkExportJobParameters theBulkExportJobParameters) {
// parameter massaging
expandParameters(theRequestDetails, theBulkExportJobParameters);
callBulkExportHooks(theRequestDetails, theBulkExportJobParameters);
// get cache boolean
boolean useCache = shouldUseCache(theRequestDetails);
// start job
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setParameters(theBulkExportJobParameters);
startRequest.setUseCache(useCache);
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest);
writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId());
}
/**
* This method changes any parameters (limiting the _type parameter, for instance)
* so that later steps in the export do not have to handle them.
*/
private void expandParameters(
@Nonnull ServletRequestDetails theRequestDetails,
@Nonnull BulkExportJobParameters theBulkExportJobParameters) {
// Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and
// is needed for the report.
theBulkExportJobParameters.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
// If no _type parameter is provided, default to all resource types except Binary
if (theBulkExportJobParameters.getResourceTypes().isEmpty()) {
List<String> resourceTypes = new ArrayList<>(myDaoRegistry.getRegisteredDaoTypes());
resourceTypes.remove(BulkDataExportUtil.UNSUPPORTED_BINARY_TYPE);
theBulkExportJobParameters.setResourceTypes(resourceTypes);
}
// Determine and validate partition permissions (if needed).
RequestPartitionId partitionId =
myRequestPartitionHelperService.determineReadPartitionForRequestForServerOperation(
theRequestDetails, ProviderConstants.OPERATION_EXPORT);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
theBulkExportJobParameters.setPartitionId(partitionId);
}
/**
* This method calls the STORAGE_PRE_INITIATE_BULK_EXPORT and STORAGE_INITIATE_BULK_EXPORT
* hooks, if present, which allow modification of the request and the bulk export job parameters.
*/
private void callBulkExportHooks(
@Nonnull ServletRequestDetails theRequestDetails,
@Nonnull BulkExportJobParameters theBulkExportJobParameters) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT)) {
HookParams preInitiateBulkExportHookParams = new HookParams()
.add(BulkExportJobParameters.class, theBulkExportJobParameters)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT, preInitiateBulkExportHookParams);
}
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_INITIATE_BULK_EXPORT)) {
HookParams initiateBulkExportHookParams = (new HookParams())
.add(BulkExportJobParameters.class, theBulkExportJobParameters)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
compositeBroadcaster.callHooks(Pointcut.STORAGE_INITIATE_BULK_EXPORT, initiateBulkExportHookParams);
}
}
/**
* This method determines whether the job cache should be used: bulk export job reuse
* must be enabled in the storage settings, and the request must not set the
* Cache-Control header to no-cache.
*/
private boolean shouldUseCache(@Nonnull ServletRequestDetails theRequestDetails) {
CacheControlDirective cacheControlDirective =
new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));
return myStorageSettings.getEnableBulkExportJobReuse() && !cacheControlDirective.isNoCache();
}
/**
* This method generates the response for the bulk export request,
* which contains the polling location.
*/
private void writePollingLocationToResponseHeaders(
@Nonnull ServletRequestDetails theRequestDetails, @Nonnull String theInstanceId) {
String serverBase = BulkDataExportUtil.getServerBase(theRequestDetails);
if (serverBase == null) {
throw new InternalErrorException(Msg.code(2136) + "Unable to get the server base.");
}
String pollLocation = serverBase + "/" + ProviderConstants.OPERATION_EXPORT_POLL_STATUS + "?"
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId;
pollLocation = UrlUtil.sanitizeHeaderValue(pollLocation);
HttpServletResponse response = theRequestDetails.getServletResponse();
// Add standard headers
theRequestDetails.getServer().addHeadersToResponse(response);
// Successful 202 Accepted
response.addHeader(Constants.HEADER_CONTENT_LOCATION, pollLocation);
response.setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
}
}
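Because the hook plumbing now lives here, a hedged sketch of a caller-side interceptor for the pre-initiate pointcut (the interceptor class and its adjustment are hypothetical; the pointcut and its parameters are the ones broadcast above):

@Interceptor
public class BulkExportParameterInterceptor {
	@Hook(Pointcut.STORAGE_PRE_INITIATE_BULK_EXPORT)
	public void beforeInitiate(BulkExportJobParameters theParams, RequestDetails theRequestDetails) {
		// Hypothetical adjustment: pin the output format before the job is queued.
		theParams.setOutputFormat(Constants.CT_FHIR_NDJSON);
	}
}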


@@ -70,6 +70,7 @@ public class FetchResourceIdsStep implements IFirstJobStepWorker<BulkExportJobPa
providerParams.setChunkId(theStepExecutionDetails.getChunkId());
providerParams.setFilters(params.getFilters());
providerParams.setStartDate(params.getSince());
providerParams.setEndDate(params.getUntil());
providerParams.setExportStyle(params.getExportStyle());
providerParams.setGroupId(params.getGroupId());
providerParams.setPatientIds(params.getPatientIds());


@@ -20,6 +20,7 @@
package ca.uhn.fhir.batch2.jobs.imprt;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportUtil;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext;
@@ -59,7 +60,6 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
import static ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider.validatePreferAsyncHeader;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class BulkDataImportProvider {
@@ -127,7 +127,7 @@ public class BulkDataImportProvider {
HttpServletResponse theResponse)
throws IOException {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_IMPORT);
BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_IMPORT);
BulkImportJobParameters jobParameters = new BulkImportJobParameters();


@@ -0,0 +1,66 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
@ExtendWith(MockitoExtension.class)
class BulkDataExportUtilTest {
private static final String URL = "http://localhost:8080";
private static final String OPERATION_NAME = "Operation Name";
@Mock
private ServletRequestDetails theRequestDetails;
@AfterEach
void tearDown() {
verifyNoMoreInteractions(theRequestDetails);
}
@ParameterizedTest
@EnumSource(value = PreferReturnEnum.class)
void validatePreferAsyncHeaderShouldThrowException(PreferReturnEnum thePreferReturnEnum) {
// Arrange
doReturn(thePreferReturnEnum.getHeaderValue()).when(theRequestDetails).getHeader(Constants.HEADER_PREFER);
// Act
assertThatThrownBy(() -> BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, OPERATION_NAME))
.isInstanceOf(InvalidRequestException.class)
.hasMessageContaining("Must request async processing for " + OPERATION_NAME);
// Assert
verify(theRequestDetails).getHeader(Constants.HEADER_PREFER);
}
@Test
void validatePreferAsyncHeaderShouldNotThrowException() {
// Arrange
doReturn(Constants.HEADER_PREFER_RESPOND_ASYNC).when(theRequestDetails).getHeader(Constants.HEADER_PREFER);
// Act
assertThatNoException().isThrownBy(() -> BulkDataExportUtil.validatePreferAsyncHeader(theRequestDetails, OPERATION_NAME));
// Assert
verify(theRequestDetails).getHeader(Constants.HEADER_PREFER);
}
@Test
void getServerBase() {
// Arrange
doReturn(URL + "/").when(theRequestDetails).getServerBaseForRequest();
// Act
final String actual = BulkDataExportUtil.getServerBase(theRequestDetails);
// Assert
assertThat(actual).isEqualTo(URL);
verify(theRequestDetails).getServerBaseForRequest();
}
}


@@ -0,0 +1,199 @@
package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.model.primitive.BooleanDt;
import ca.uhn.fhir.model.primitive.DateDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.Test;
import java.util.Date;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
class BulkExportJobParametersBuilderTest {
private final BulkExportJobParametersBuilder myFixture = new BulkExportJobParametersBuilder();
@Test
void resourceTypes() {
// Arrange
final List<String> expected = List.of("Patient", "Observation", "MedicationRequest");
final IPrimitiveType<String> resourceTypes = new StringDt(String.join(",", expected));
// Act
myFixture.resourceTypes(resourceTypes);
// Assert
assertThat(myFixture.build().getResourceTypes()).containsAll(expected);
}
@Test
void resourceTypesWhenNull() {
// Act
myFixture.resourceTypes(null);
// Assert
assertThat(myFixture.build().getResourceTypes()).isEmpty();
}
@Test
void since() {
// Arrange
final Date expected = new Date();
final IPrimitiveType<Date> since = new DateDt(expected);
// Act
myFixture.since(since);
// Assert
assertThat(myFixture.build().getSince()).isEqualTo(expected);
}
@Test
void until() {
// Arrange
final Date expected = new Date();
final IPrimitiveType<Date> until = new DateDt(expected);
// Act
myFixture.until(until);
// Assert
assertThat(myFixture.build().getUntil()).isEqualTo(expected);
}
@Test
void filters() {
// Arrange
final List<String> expected = List.of("Patient", "Observation", "MedicationRequest");
final List<IPrimitiveType<String>> filters = expected.stream().map(value -> (IPrimitiveType<String>) new StringDt(value)).toList();
// Act
myFixture.filters(filters);
// Assert
assertThat(myFixture.build().getFilters()).containsAll(expected);
}
@Test
void filtersWhenNull() {
// Act
myFixture.filters(null);
// Assert
assertThat(myFixture.build().getFilters()).isEmpty();
}
@Test
void outputFormat() {
// Arrange
final String expected = "some value";
final IPrimitiveType<String> outputFormat = new StringDt(expected);
// Act
myFixture.outputFormat(outputFormat);
// Assert
assertThat(myFixture.build().getOutputFormat()).isEqualTo(expected);
}
@Test
void outputFormatWhenNull() {
// Act
myFixture.outputFormat(null);
// Assert
assertThat(myFixture.build().getOutputFormat()).isEqualTo(Constants.CT_FHIR_NDJSON);
}
@Test
void exportStyle() {
// Arrange
final BulkExportJobParameters.ExportStyle expected = BulkExportJobParameters.ExportStyle.SYSTEM;
// Act
myFixture.exportStyle(expected);
// Assert
assertThat(myFixture.build().getExportStyle()).isEqualTo(expected);
}
@Test
void patientIds() {
// Arrange
final List<String> expected = List.of("ID1", "ID2", "ID3");
final List<IPrimitiveType<String>> patientIds = expected.stream().map(value -> (IPrimitiveType<String>) new StringDt(value)).toList();
// Act
myFixture.patientIds(patientIds);
// Assert
assertThat(myFixture.build().getPatientIds()).containsAll(expected);
}
@Test
void patientIdsWhenNull() {
// Act
myFixture.patientIds(null);
// Assert
assertThat(myFixture.build().getPatientIds()).isEmpty();
}
@Test
void groupId() {
// Arrange
final String expected = "GROUP_ID";
final IdType groupId = new IdType(expected);
// Act
myFixture.groupId(groupId);
// Assert
assertThat(myFixture.build().getGroupId()).isEqualTo(expected);
}
@Test
void expandMdm() {
// Arrange
final IPrimitiveType<Boolean> expandMdm = new BooleanDt(Boolean.TRUE);
// Act
myFixture.expandMdm(expandMdm);
// Assert
assertThat(myFixture.build().isExpandMdm()).isTrue();
}
@Test
void expandMdmWhenNull() {
// Act
myFixture.expandMdm(null);
// Assert
assertThat(myFixture.build().isExpandMdm()).isFalse();
}
@Test
void partitionId() {
// Arrange
final RequestPartitionId expected = RequestPartitionId.fromPartitionName("PARTITION_NAME");
// Act
myFixture.partitionId(expected);
// Assert
assertThat(myFixture.build().getPartitionId()).isEqualTo(expected);
}
@Test
void exportIdentifier() {
// Arrange
final String expected = "EXPORT_IDENTIFIER";
final StringDt exportIdentifier = new StringDt(expected);
// Act
myFixture.exportIdentifier(exportIdentifier);
// Assert
assertThat(myFixture.build().getExportIdentifier()).isEqualTo(expected);
}
@Test
void postFetchFilterUrl() {
// Arrange
final List<String> expected = List.of("URL1", "URL2", "URL3");
final List<IPrimitiveType<String>> postFetchFilterUrls = expected.stream().map(value -> (IPrimitiveType<String>) new StringDt(value)).toList();
// Act
myFixture.postFetchFilterUrl(postFetchFilterUrls);
// Assert
assertThat(myFixture.build().getPostFetchFilterUrls()).containsAll(expected);
}
@Test
void postFetchFilterUrlWhenNull() {
// Act
myFixture.postFetchFilterUrl(null);
// Assert
assertThat(myFixture.build().getPostFetchFilterUrls()).isEmpty();
}
}


@@ -41,6 +41,11 @@ public class ExportPIDIteratorParameters {
*/
private Date myStartDate;
/**
* The latest date up to which to retrieve records
*/
private Date myEndDate;
/**
* List of filters to be applied to the search.
* Eg:
@@ -108,6 +113,14 @@ public class ExportPIDIteratorParameters {
myStartDate = theStartDate;
}
public Date getEndDate() {
return myEndDate;
}
public void setEndDate(Date theEndDate) {
myEndDate = theEndDate;
}
public List<String> getFilters() {
return myFilters;
}


@@ -48,16 +48,17 @@ public class BulkExportHelperService {
/**
* Given the parameters, create the search parameter map based on type filters and the _since parameter.
*
* The input boolean theConsiderSince determines whether to consider the lastUpdated date in the search parameter map.
* The input boolean theConsiderDateRange determines whether to consider the lastUpdated date in the search parameter map.
*/
public List<SearchParameterMap> createSearchParameterMapsForResourceType(
RuntimeResourceDefinition theDef, ExportPIDIteratorParameters theParams, boolean theConsiderSince) {
RuntimeResourceDefinition theDef, ExportPIDIteratorParameters theParams, boolean theConsiderDateRange) {
String resourceType = theDef.getName();
List<String> typeFilters = theParams.getFilters();
List<SearchParameterMap> spMaps = null;
spMaps = typeFilters.stream()
.filter(typeFilter -> typeFilter.startsWith(resourceType + "?"))
.map(filter -> buildSearchParameterMapForTypeFilter(filter, theDef, theParams.getStartDate()))
.map(filter -> buildSearchParameterMapForTypeFilter(
filter, theDef, theParams.getStartDate(), theParams.getEndDate()))
.collect(Collectors.toList());
typeFilters.stream().filter(filter -> !filter.contains("?")).forEach(filter -> {
@@ -69,8 +70,8 @@
// None of the _typeFilters applied to the current resource type, so just make a simple one.
if (spMaps.isEmpty()) {
SearchParameterMap defaultMap = new SearchParameterMap();
if (theConsiderSince) {
enhanceSearchParameterMapWithCommonParameters(defaultMap, theParams.getStartDate());
if (theConsiderDateRange) {
addLastUpdatedFilter(defaultMap, theParams.getStartDate(), theParams.getEndDate());
}
spMaps = Collections.singletonList(defaultMap);
}
@@ -79,16 +80,16 @@
}
private SearchParameterMap buildSearchParameterMapForTypeFilter(
String theFilter, RuntimeResourceDefinition theDef, Date theSinceDate) {
String theFilter, RuntimeResourceDefinition theDef, Date theStartDate, Date theEndDate) {
SearchParameterMap searchParameterMap = myMatchUrlService.translateMatchUrl(theFilter, theDef);
enhanceSearchParameterMapWithCommonParameters(searchParameterMap, theSinceDate);
addLastUpdatedFilter(searchParameterMap, theStartDate, theEndDate);
return searchParameterMap;
}
private void enhanceSearchParameterMapWithCommonParameters(SearchParameterMap map, Date theSinceDate) {
void addLastUpdatedFilter(SearchParameterMap map, Date theStartDate, Date theEndDate) {
map.setLoadSynchronous(true);
if (theSinceDate != null) {
map.setLastUpdated(new DateRangeParam(theSinceDate, null));
if (theStartDate != null || theEndDate != null) {
map.setLastUpdated(new DateRangeParam(theStartDate, theEndDate));
}
}
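Finally, a small sketch of what the renamed addLastUpdatedFilter leaves on the map when both bounds are supplied (the method is package-visible, so this mirrors the unit tests earlier in this diff rather than external usage):

void illustrate(BulkExportHelperService theHelper, Date theStart, Date theEnd) {
	SearchParameterMap map = new SearchParameterMap();
	theHelper.addLastUpdatedFilter(map, theStart, theEnd);
	// map.getLastUpdated() now equals new DateRangeParam(theStart, theEnd),
	// and the map has been set to load synchronously.
}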