Add initiate bulk export pointcut (#2692)

* Add initiate bulk export pointcut
* Add changelog
* Test fixes

parent 94bd2400e3
commit 05ac88bc21
@@ -1005,6 +1005,43 @@ public enum Pointcut implements IPointcut {
         "org.hl7.fhir.instance.model.api.IBaseResource"
     ),
 
+    /**
+     * <b>Storage Hook:</b>
+     * Invoked when a Bulk Export job is being kicked off. Hook methods may modify
+     * the request, or raise an exception to prevent it from being initiated.
+     * <p>
+     * Hooks may accept the following parameters:
+     * </p>
+     * <ul>
+     * <li>
+     * ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions - The details of the job being kicked off
+     * </li>
+     * <li>
+     * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+     * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+     * pulled out of the servlet request. Note that the bean
+     * properties are not all guaranteed to be populated, depending on how early during processing the
+     * exception occurred. <b>Note that this parameter may be null in contexts where the request is not
+     * known, such as while processing searches</b>
+     * </li>
+     * <li>
+     * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+     * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+     * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+     * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+     * </li>
+     * </ul>
+     * <p>
+     * Hooks should return <code>void</code>, and can throw exceptions.
+     * </p>
+     */
+    STORAGE_INITIATE_BULK_EXPORT(
+        void.class,
+        "ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions",
+        "ca.uhn.fhir.rest.api.server.RequestDetails",
+        "ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
+    ),
+
     /**
      * <b>Storage Hook:</b>
      * Invoked when a set of resources are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
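For readers unfamiliar with HAPI FHIR interceptors, a hook for the new pointcut is registered like any other interceptor. The following is a minimal sketch, not part of this commit: the class name and the policy it enforces are hypothetical, while the pointcut and its parameter types come from the definition above.

    import ca.uhn.fhir.interceptor.api.Hook;
    import ca.uhn.fhir.interceptor.api.Interceptor;
    import ca.uhn.fhir.interceptor.api.Pointcut;
    import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
    import ca.uhn.fhir.rest.api.server.RequestDetails;
    import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;

    @Interceptor
    public class BulkExportGuardInterceptor {

        // Hypothetical policy: veto any export that is not a system-level export.
        // Hook methods may accept a subset of the declared parameters, in order.
        @Hook(Pointcut.STORAGE_INITIATE_BULK_EXPORT)
        public void initiateBulkExport(BulkDataExportOptions theOptions, RequestDetails theRequestDetails) {
            if (theOptions.getExportStyle() != BulkDataExportOptions.ExportStyle.SYSTEM) {
                // Throwing here prevents the job from being created, per the Javadoc above
                throw new ForbiddenOperationException("Only system-level bulk export is permitted");
            }
        }
    }

Registering an instance of this class with the server's interceptor registry is enough for the hook to fire on every submitted export.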
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 2692
+title: "A new Pointcut has been added that is invoked when a new Bulk Export is initiated."
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.bulk.export.api;
  */
 
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import org.hl7.fhir.instance.model.api.IIdType;
 
 import javax.transaction.Transactional;
@@ -37,7 +38,7 @@ public interface IBulkDataExportSvc {
 
     JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions);
 
-    JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache);
+    JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache, RequestDetails theRequestDetails);
 
     JobInfo getJobInfoOrThrowResourceNotFound(String theJobId);
 
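Because the two-argument overload is replaced rather than kept, existing callers of submitJob must be updated. A minimal sketch of a caller under the new signature follows; the class name and wiring are illustrative, not from this commit:

    import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
    import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
    import ca.uhn.fhir.rest.api.server.RequestDetails;
    import com.google.common.collect.Sets;

    public class SubmitJobExample {

        // Assumed to be injected by the surrounding Spring context
        private IBulkDataExportSvc myBulkDataExportSvc;

        public String submitSystemExport(RequestDetails theRequestDetails) {
            BulkDataExportOptions options = new BulkDataExportOptions();
            options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
            options.setResourceTypes(Sets.newHashSet("Patient"));
            // Pass the RequestDetails when a request context exists; callers with
            // no active request pass null, as some provider methods below do
            IBulkDataExportSvc.JobInfo job = myBulkDataExportSvc.submitJob(options, true, theRequestDetails);
            return job.getJobId();
        }
    }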
@@ -91,7 +91,7 @@ public class BulkDataExportProvider {
         validatePreferAsyncHeader(theRequestDetails);
         BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
         Boolean useCache = shouldUseCache(theRequestDetails);
-        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, useCache);
+        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, useCache, theRequestDetails);
         writePollingLocationToResponseHeaders(theRequestDetails, outcome);
     }
 
@@ -127,7 +127,7 @@ public class BulkDataExportProvider {
         validatePreferAsyncHeader(theRequestDetails);
         BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm);
         validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
-        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails));
+        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails), null);
         writePollingLocationToResponseHeaders(theRequestDetails, outcome);
     }
 
@@ -157,7 +157,7 @@ public class BulkDataExportProvider {
         validatePreferAsyncHeader(theRequestDetails);
         BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
         validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
-        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails));
+        IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails), null);
         writePollingLocationToResponseHeaders(theRequestDetails, outcome);
     }
 
@@ -23,6 +23,9 @@ package ca.uhn.fhir.jpa.bulk.export.svc;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
+import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
@@ -45,8 +48,10 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
+import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.util.UrlUtil;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
@@ -295,12 +300,23 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
     @Transactional
     @Override
     public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) {
-        return submitJob(theBulkDataExportOptions, true);
+        return submitJob(theBulkDataExportOptions, true, null);
     }
 
+    @Autowired
+    private IInterceptorBroadcaster myInterceptorBroadcaster;
+
     @Transactional
     @Override
-    public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache) {
+    public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache, RequestDetails theRequestDetails) {
+
+        // Interceptor call: STORAGE_INITIATE_BULK_EXPORT
+        HookParams params = new HookParams()
+            .add(BulkDataExportOptions.class, theBulkDataExportOptions)
+            .add(RequestDetails.class, theRequestDetails)
+            .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
+        myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_INITIATE_BULK_EXPORT, params);
+
         String outputFormat = Constants.CT_FHIR_NDJSON;
         if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) {
             outputFormat = theBulkDataExportOptions.getOutputFormat();
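The broadcast above can also be consumed without an annotated interceptor class. Below is a minimal sketch using an anonymous interceptor, assuming an IInterceptorService is available from the application context; the output-format policy applied in the lambda is invented for the example:

    import ca.uhn.fhir.interceptor.api.IInterceptorService;
    import ca.uhn.fhir.interceptor.api.Pointcut;
    import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
    import ca.uhn.fhir.rest.api.Constants;

    public class BulkExportHookRegistration {

        public static void register(IInterceptorService theInterceptorService) {
            theInterceptorService.registerAnonymousInterceptor(Pointcut.STORAGE_INITIATE_BULK_EXPORT, (thePointcut, theParams) -> {
                BulkDataExportOptions options = theParams.get(BulkDataExportOptions.class);
                // Hooks may modify the request before the job is created; forcing
                // NDJSON output here is an illustrative policy, not part of this commit
                options.setOutputFormat(Constants.CT_FHIR_NDJSON);
            });
        }
    }

Note that the hook fires at the top of submitJob, before any job entity is written, so an exception thrown by a hook simply prevents the job from ever being created.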
@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.bulk;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
@@ -9,6 +10,7 @@ import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.client.apache.ResourceEntity;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@@ -58,6 +60,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.isNull;
+import static org.mockito.ArgumentMatchers.nullable;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -75,6 +78,8 @@ public class BulkDataExportProviderTest {
     private int myPort;
     @Mock
     private IBulkDataExportSvc myBulkDataExportSvc;
+    @Mock
+    private IInterceptorBroadcaster myInterceptorBroadcaster;
     private CloseableHttpClient myClient;
     @Captor
     private ArgumentCaptor<BulkDataExportOptions> myBulkDataExportOptionsCaptor;
@@ -116,7 +121,7 @@ public class BulkDataExportProviderTest {
 
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
 
         InstantType now = InstantType.now();
 
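The stubbing change above (and the matching changes throughout this test class) is driven by a Mockito detail: type-based matchers such as any(RequestDetails.class) do not match null arguments, and the provider now sometimes passes null for the new parameter, so the tests use nullable(...). A small self-contained illustration, with an invented class name and job ID:

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.ArgumentMatchers.nullable;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
    import ca.uhn.fhir.rest.api.server.RequestDetails;

    public class NullableMatcherExample {
        public static void main(String[] args) {
            IBulkDataExportSvc svc = mock(IBulkDataExportSvc.class);
            IBulkDataExportSvc.JobInfo job = new IBulkDataExportSvc.JobInfo().setJobId("example-job");

            // nullable(RequestDetails.class) matches both a real RequestDetails and null;
            // any(RequestDetails.class) would reject the null that some callers pass
            when(svc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(job);

            // The stub matches even though the RequestDetails argument is null
            System.out.println(svc.submitJob(null, true, null).getJobId());
        }
    }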
@@ -140,7 +145,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
@@ -154,7 +159,7 @@ public class BulkDataExportProviderTest {
 
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
 
         InstantType now = InstantType.now();
 
@@ -175,7 +180,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
@@ -304,7 +309,7 @@ public class BulkDataExportProviderTest {
     public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
 
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
         when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Observation", "DiagnosticReport"));
 
         InstantType now = InstantType.now();
@@ -331,7 +336,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));
@@ -345,7 +350,7 @@ public class BulkDataExportProviderTest {
     public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
 
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
         when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Patient", "Practitioner"));
 
         InstantType now = InstantType.now();
@@ -368,7 +373,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
@@ -410,7 +415,7 @@ public class BulkDataExportProviderTest {
         get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
         myClient.execute(get);
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), anyBoolean());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), anyBoolean(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
 
         assertThat(options.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
@@ -437,7 +442,7 @@ public class BulkDataExportProviderTest {
     public void testInitiateGroupExportWithNoResourceTypes() throws IOException {
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
 
         String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
             + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);
@@ -454,7 +459,7 @@ public class BulkDataExportProviderTest {
 
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
 
         InstantType now = InstantType.now();
 
@@ -477,7 +482,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Patient"));
@@ -488,7 +493,7 @@ public class BulkDataExportProviderTest {
     public void testInitiatePatientExportRequest() throws IOException {
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), any())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
         when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Immunization", "Observation"));
 
         InstantType now = InstantType.now();
@@ -513,7 +518,7 @@ public class BulkDataExportProviderTest {
             assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
         }
 
-        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture());
+        verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class));
         BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
         assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
         assertThat(options.getResourceTypes(), containsInAnyOrder("Immunization", "Observation"));
@@ -525,7 +530,7 @@ public class BulkDataExportProviderTest {
     public void testProviderProcessesNoCacheHeader() throws IOException {
         IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
             .setJobId(A_JOB_ID);
-        when(myBulkDataExportSvc.submitJob(any(), anyBoolean())).thenReturn(jobInfo);
+        when(myBulkDataExportSvc.submitJob(any(), anyBoolean(), nullable(RequestDetails.class))).thenReturn(jobInfo);
 
 
         Parameters input = new Parameters();
@@ -545,7 +550,7 @@ public class BulkDataExportProviderTest {
         }
 
 
-        verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture());
+        verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class));
         Boolean usedCache = myBooleanArgumentCaptor.getValue();
         assertThat(usedCache, is(equalTo(false)));
     }
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.bulk;
 
+import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
@@ -23,6 +24,7 @@ import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
 import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
 import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.util.HapiExtensions;
 import ca.uhn.fhir.util.UrlUtil;
@@ -44,6 +46,8 @@ import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.Reference;
 import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.ArgumentMatchers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.batch.core.Job;
@@ -69,8 +73,13 @@ import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertSame;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
 
 public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
 
@@ -162,6 +171,30 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
 
     }
 
+    @Test
+    public void testSubmit_InterceptorCalled() {
+        IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class);
+        myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_INITIATE_BULK_EXPORT, interceptor);
+        try {
+
+            BulkDataExportOptions options = new BulkDataExportOptions();
+            options.setResourceTypes(Sets.newHashSet("Patient", "Observation"));
+            options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+            myBulkDataExportSvc.submitJob(options, true, mySrd);
+
+            ArgumentCaptor<HookParams> paramsCaptor = ArgumentCaptor.forClass(HookParams.class);
+            verify(interceptor, times(1)).invoke(eq(Pointcut.STORAGE_INITIATE_BULK_EXPORT), paramsCaptor.capture());
+
+            HookParams captured = paramsCaptor.getValue();
+            assertSame(options, captured.get(BulkDataExportOptions.class));
+            assertSame(mySrd, captured.get(RequestDetails.class));
+
+        } finally {
+            myInterceptorRegistry.unregisterInterceptor(interceptor);
+        }
+
+    }
+
     @Test
     public void testSubmit_InvalidOutputFormat() {
         try {
@@ -1034,23 +1067,23 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
         BulkDataExportOptions options = new BulkDataExportOptions();
         options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
         options.setResourceTypes(Sets.newHashSet("Procedure"));
-        IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(options, true);
-        IBulkDataExportSvc.JobInfo jobInfo1 = myBulkDataExportSvc.submitJob(options, true);
-        IBulkDataExportSvc.JobInfo jobInfo2 = myBulkDataExportSvc.submitJob(options, true);
-        IBulkDataExportSvc.JobInfo jobInfo3 = myBulkDataExportSvc.submitJob(options, true);
-        IBulkDataExportSvc.JobInfo jobInfo4 = myBulkDataExportSvc.submitJob(options, true);
+        IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(options, true, null);
+        IBulkDataExportSvc.JobInfo jobInfo1 = myBulkDataExportSvc.submitJob(options, true, null);
+        IBulkDataExportSvc.JobInfo jobInfo2 = myBulkDataExportSvc.submitJob(options, true, null);
+        IBulkDataExportSvc.JobInfo jobInfo3 = myBulkDataExportSvc.submitJob(options, true, null);
+        IBulkDataExportSvc.JobInfo jobInfo4 = myBulkDataExportSvc.submitJob(options, true, null);
 
         //Cached should have all identical Job IDs.
         String initialJobId = jobInfo.getJobId();
         boolean allMatch = Stream.of(jobInfo, jobInfo1, jobInfo2, jobInfo3, jobInfo4).allMatch(job -> job.getJobId().equals(initialJobId));
         assertTrue(allMatch);
 
-        IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options, false);
-        IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options, false);
-        IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options, false);
-        IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options, false);
+        IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options, false, null);
+        IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options, false, null);
+        IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options, false, null);
+        IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options, false, null);
         Thread.sleep(100L); //stupid commit timings.
-        IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options, false);
+        IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options, false, null);
 
         //First non-cached should retrieve new ID.
         assertThat(initialJobId, is(not(equalTo(jobInfo5.getJobId()))));
@@ -1061,9 +1094,10 @@ public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
         assertEquals(uniqueJobIds.size(), jobIds.size());
 
         //Now if we create another one and ask for the cache, we should get the most-recently-insert entry.
-        IBulkDataExportSvc.JobInfo jobInfo10 = myBulkDataExportSvc.submitJob(options, true);
+        IBulkDataExportSvc.JobInfo jobInfo10 = myBulkDataExportSvc.submitJob(options, true, null);
         assertThat(jobInfo10.getJobId(), is(equalTo(jobInfo9.getJobId())));
     }
 
     @Test
     public void testBulkExportWritesToDEFAULTPartitionWhenPartitioningIsEnabled() {
         myPartitionSettings.setPartitioningEnabled(true);