Add test for cache busting header

parent 827ee33414
commit 412cf003b0
@@ -37,6 +37,8 @@ public interface IBulkDataExportSvc {
 
 	JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions);
 
+	JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache);
+
 	JobInfo getJobInfoOrThrowResourceNotFound(String theJobId);
 
 	/**
@@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.rest.annotation.IdParam;
 import ca.uhn.fhir.rest.annotation.Operation;
 import ca.uhn.fhir.rest.annotation.OperationParam;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.PreferHeader;
 import ca.uhn.fhir.rest.server.RestfulServerUtils;
@@ -44,6 +45,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.hl7.fhir.r4.model.InstantType;
 import org.slf4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
+import sun.misc.Cache;
 
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
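Note: the added import sun.misc.Cache looks like a stray IDE auto-import; nothing in the changed provider code references it, and sun.misc classes are not part of the supported JDK API, so it is worth removing before merge.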
@@ -89,10 +91,16 @@ public class BulkDataExportProvider {
 	) {
 		validatePreferAsyncHeader(theRequestDetails);
 		BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
-		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		Boolean useCache = shouldUseCache(theRequestDetails);
+		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, useCache);
 		writePollingLocationToResponseHeaders(theRequestDetails, outcome);
 	}
 
+	private boolean shouldUseCache(ServletRequestDetails theRequestDetails) {
+		CacheControlDirective cacheControlDirective = new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL));
+		return !cacheControlDirective.isNoCache();
+	}
+
 	private String getServerBase(ServletRequestDetails theRequestDetails) {
 		return StringUtils.removeEnd(theRequestDetails.getServerBaseForRequest(), "/");
 	}
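Note: shouldUseCache() maps the standard Cache-Control request header onto the new useCache flag. A minimal sketch of that mapping, using only the CacheControlDirective calls that appear in the hunk above (the singleton list stands in for theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)):

	List<String> headerValues = Collections.singletonList(Constants.CACHE_CONTROL_NO_CACHE);
	CacheControlDirective directive = new CacheControlDirective().parse(headerValues);
	boolean useCache = !directive.isNoCache(); // false here, so the export cache is bypassed

Any other header value, or no Cache-Control header at all, leaves isNoCache() false and keeps the job-reuse behavior.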
@@ -120,7 +128,7 @@ public class BulkDataExportProvider {
 		validatePreferAsyncHeader(theRequestDetails);
 		BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm);
 		validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
-		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails));
 		writePollingLocationToResponseHeaders(theRequestDetails, outcome);
 	}
 
@@ -148,7 +156,7 @@ public class BulkDataExportProvider {
 		myBulkDataExportSvc.getPatientCompartmentResources();
 		validatePreferAsyncHeader(theRequestDetails);
 		BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter);
-		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions);
+		IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(bulkDataExportOptions, shouldUseCache(theRequestDetails));
 		writePollingLocationToResponseHeaders(theRequestDetails, outcome);
 	}
 
@@ -289,6 +289,12 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 	@Transactional
 	@Override
 	public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) {
+		return submitJob(theBulkDataExportOptions, true);
+	}
+
+	@Transactional
+	@Override
+	public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache) {
 		String outputFormat = Constants.CT_FHIR_NDJSON;
 		if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) {
 			outputFormat = theBulkDataExportOptions.getOutputFormat();
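Note: the one-argument overload now simply delegates with useCache = true, so existing callers keep today's job-reuse behavior. A quick sketch of the two resulting entry points (options built as in the tests further down):

	IBulkDataExportSvc.JobInfo reused = myBulkDataExportSvc.submitJob(options);        // equivalent to submitJob(options, true)
	IBulkDataExportSvc.JobInfo fresh = myBulkDataExportSvc.submitJob(options, false);  // always submits a new job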
@@ -333,11 +339,15 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 
 		String request = requestBuilder.toString();
 
-		Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
-		Pageable page = PageRequest.of(0, 10);
-		Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
-		if (!existing.isEmpty()) {
-			return toSubmittedJobInfo(existing.iterator().next());
+		//If we are using the cache, then attempt to retrieve a matching job based on the Request String, otherwise just make a new one.
+		if (useCache) {
+			Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
+			Pageable page = PageRequest.of(0, 10);
+			Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
+			if (!existing.isEmpty()) {
+				return toSubmittedJobInfo(existing.iterator().next());
+			}
 		}
 
 		if (resourceTypes != null && resourceTypes.contains("Binary")) {
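Note: the cache lookup only considers jobs created inside the reuse window. A minimal sketch of the cutoff arithmetic, assuming a hypothetical window of one hour (the real value comes from myReuseBulkExportForMillis, configured elsewhere):

	int reuseWindowMillis = 60 * 60 * 1000;  // hypothetical: one hour
	Date cutoff = DateUtils.addMilliseconds(new Date(), -reuseWindowMillis);
	// findExistingJob(...) then only matches jobs with myCreated > cutoff whose status is not ERROR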
@@ -43,7 +43,7 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Lo
 	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
 	Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);
 
-	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status")
+	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
 	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus);
 
 	@Modifying
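Note: the added ORDER BY j.myCreated DESC makes the reuse deterministic: when several jobs match the same request string and cutoff, existing.iterator().next() in the service code above now returns the most recently created job rather than an arbitrary one.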
@@ -56,6 +56,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
 import static org.mockito.ArgumentMatchers.isNull;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.times;
@@ -77,6 +78,8 @@ public class BulkDataExportProviderTest {
 	private CloseableHttpClient myClient;
 	@Captor
 	private ArgumentCaptor<BulkDataExportOptions> myBulkDataExportOptionsCaptor;
+	@Captor
+	private ArgumentCaptor<Boolean> myBooleanArgumentCaptor;
 
 	@AfterEach
 	public void after() throws Exception {
@@ -336,6 +339,43 @@ public class BulkDataExportProviderTest {
 		assertThat(options.isExpandMdm(), is(equalTo(true)));
 	}
 
+	@Test
+	public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
+
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID);
+		when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo);
+		when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Patient", "Practitioner"));
+
+		InstantType now = InstantType.now();
+
+		String url = "http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT
+			+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+			+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner")
+			+ "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString())
+			+ "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo|bar")
+			+ "&" + JpaConstants.PARAM_EXPORT_MDM + "=true";
+
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		ourLog.info("Request: {}", url);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(202, response.getStatusLine().getStatusCode());
+			assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+			assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+		}
+
+		verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture());
+		BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue();
+		assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
+		assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
+		assertThat(options.getSince(), notNullValue());
+		assertThat(options.getFilters(), notNullValue());
+		assertEquals(GROUP_ID, options.getGroupId().getValue());
+		assertThat(options.isExpandMdm(), is(equalTo(true)));
+	}
+
 	@Test
 	public void testInitiateWithGetAndMultipleTypeFilters() throws IOException {
 		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
@@ -461,4 +501,34 @@ public class BulkDataExportProviderTest {
 		assertThat(options.getSince(), notNullValue());
 		assertThat(options.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo"));
 	}
 
+	@Test
+	public void testProviderProcessesNoCacheHeader() throws IOException {
+		IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
+			.setJobId(A_JOB_ID);
+		when(myBulkDataExportSvc.submitJob(any(), anyBoolean())).thenReturn(jobInfo);
+
+		Parameters input = new Parameters();
+		input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
+		input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner"));
+
+		HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+		post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE);
+		post.setEntity(new ResourceEntity(myCtx, input));
+		ourLog.info("Request: {}", post);
+		try (CloseableHttpResponse response = myClient.execute(post)) {
+			ourLog.info("Response: {}", response.toString());
+			assertEquals(202, response.getStatusLine().getStatusCode());
+			assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+			assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+		}
+
+		verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture());
+		Boolean usedCache = myBooleanArgumentCaptor.getValue();
+		assertThat(usedCache, is(equalTo(false)));
+	}
+
 }
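Note: this test pins down the no-cache path only. A natural companion (not part of this diff, sketched here as a suggestion) would issue the same POST without the Cache-Control header and assert the default:

	verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture());
	assertThat(myBooleanArgumentCaptor.getValue(), is(equalTo(true)));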
@@ -61,6 +61,7 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static org.awaitility.Awaitility.await;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -70,6 +71,7 @@ import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
 public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
@@ -974,6 +976,41 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 		assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE)));
 	}
 
+	@Test
+	public void testCacheSettingIsRespectedWhenCreatingNewJobs() {
+		BulkDataExportOptions options = new BulkDataExportOptions();
+		options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+		options.setResourceTypes(Sets.newHashSet("Procedure"));
+		IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.submitJob(options, true);
+		IBulkDataExportSvc.JobInfo jobInfo1 = myBulkDataExportSvc.submitJob(options, true);
+		IBulkDataExportSvc.JobInfo jobInfo2 = myBulkDataExportSvc.submitJob(options, true);
+		IBulkDataExportSvc.JobInfo jobInfo3 = myBulkDataExportSvc.submitJob(options, true);
+		IBulkDataExportSvc.JobInfo jobInfo4 = myBulkDataExportSvc.submitJob(options, true);
+
+		//Cached submissions should all return the same job ID.
+		String initialJobId = jobInfo.getJobId();
+		boolean allMatch = Stream.of(jobInfo, jobInfo1, jobInfo2, jobInfo3, jobInfo4).allMatch(job -> job.getJobId().equals(initialJobId));
+		assertTrue(allMatch);
+
+		BulkDataExportOptions options2 = new BulkDataExportOptions();
+		options2.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+		options2.setResourceTypes(Sets.newHashSet("Procedure"));
+		IBulkDataExportSvc.JobInfo jobInfo5 = myBulkDataExportSvc.submitJob(options2, false);
+		IBulkDataExportSvc.JobInfo jobInfo6 = myBulkDataExportSvc.submitJob(options2, false);
+		IBulkDataExportSvc.JobInfo jobInfo7 = myBulkDataExportSvc.submitJob(options2, false);
+		IBulkDataExportSvc.JobInfo jobInfo8 = myBulkDataExportSvc.submitJob(options2, false);
+		IBulkDataExportSvc.JobInfo jobInfo9 = myBulkDataExportSvc.submitJob(options2, false);
+
+		//The first non-cached submission should get a new job ID.
+		assertThat(initialJobId, is(not(equalTo(jobInfo5.getJobId()))));
+
+		//Non-cached submissions should all have unique job IDs.
+		List<String> jobIds = Stream.of(jobInfo5, jobInfo6, jobInfo7, jobInfo8, jobInfo9).map(IBulkDataExportSvc.JobInfo::getJobId).collect(Collectors.toList());
+		ourLog.info("Non-cached job IDs: {}", String.join(", ", jobIds));
+		Set<String> uniqueJobIds = new HashSet<>(jobIds);
+		assertEquals(uniqueJobIds.size(), jobIds.size());
+	}
+
 	private void awaitJobCompletion(JobExecution theJobExecution) {
 		await().atMost(120, TimeUnit.SECONDS).until(() -> {
 			JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());