Add changelog, test, and implementation (#4055)

Tadgh 2022-09-21 21:18:12 -07:00 committed by GitHub
parent 201cd73e3c
commit 5a978646c5
10 changed files with 113 additions and 6 deletions

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 4051
jira: SMILE-5169
title: "Fixed the `$poll-export-status` endpoint so that when a job is complete, this endpoint now correctly includes the `request` and `requiresAccessToken` attributes."

View File

@@ -205,8 +205,6 @@ public class BulkDataExportProviderTest {
when(myJobRunner.startNewJob(any()))
.thenReturn(createJobStartResponse());
InstantType now = InstantType.now();
Parameters input = new Parameters();
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -356,6 +354,7 @@ public class BulkDataExportProviderTest {
ids.add(new IdType("Binary/222").getValueAsString());
ids.add(new IdType("Binary/333").getValueAsString());
BulkExportJobResults results = new BulkExportJobResults();
HashMap<String, List<String>> map = new HashMap<>();
map.put("Patient", ids);
results.setResourceTypeToBinaryIds(map);
@@ -665,7 +664,6 @@ public class BulkDataExportProviderTest {
ourLog.info("Request: {}", post);
try (CloseableHttpResponse response = myClient.execute(post)) {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());

View File

@@ -4,6 +4,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.parser.IParser;
@@ -37,6 +38,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -45,8 +47,10 @@ import java.util.Map;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
@@ -59,6 +63,42 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
@Autowired
private IBatch2JobRunner myJobRunner;
@Nested
public class SpecConformanceTests {
@Test
public void testPollingLocationContainsAllRequiredAttributesUponCompletion() throws IOException {
//Given a patient exists
Patient p = new Patient();
p.setId("Pat-1");
myClient.update().resource(p).execute();
//And Given we start a bulk export job
HttpGet httpGet = new HttpGet(myClient.getServerBase() + "/$export?_type=Patient");
httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
String pollingLocation;
try (CloseableHttpResponse status = ourHttpClient.execute(httpGet)) {
Header[] headers = status.getHeaders("Content-Location");
pollingLocation = headers[0].getValue();
}
String jobId = pollingLocation.substring(pollingLocation.indexOf("_jobId=") + 7);
myBatch2JobHelper.awaitJobCompletion(jobId);
//Then: When the poll shows as complete, all attributes should be filled.
HttpGet statusGet = new HttpGet(pollingLocation);
try (CloseableHttpResponse status = ourHttpClient.execute(statusGet)) {
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(responseContent);
BulkExportResponseJson result = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class);
assertThat(result.getRequest(), is(equalTo(myClient.getServerBase() + "/$export?_type=Patient")));
assertThat(result.getRequiresAccessToken(), is(equalTo(true)));
assertThat(result.getTransactionTime(), is(notNullValue()));
assertThat(result.getError(), is(empty()));
assertThat(result.getOutput(), is(not(empty())));
}
}
}
@Nested
public class SystemBulkExportTests {
@Test

View File

@@ -126,6 +126,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
ourRestServer = new RestfulServer(myFhirContext);
ourRestServer.registerProviders(myResourceProviders.createProviders());
ourRestServer.registerProvider(myBinaryAccessProvider);
ourRestServer.registerProvider(myBulkDataExportProvider);
ourRestServer.getInterceptorService().registerInterceptor(myBinaryStorageInterceptor);
ourRestServer.getFhirContext().setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());
ourRestServer.setDefaultResponseEncoding(EncodingEnum.XML);

View File

@@ -39,6 +39,7 @@ import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
@@ -263,6 +264,8 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Autowired
protected BinaryAccessProvider myBinaryAccessProvider;
@Autowired
protected BulkDataExportProvider myBulkDataExportProvider;
@Autowired
protected BinaryStorageInterceptor myBinaryStorageInterceptor;
@Autowired
protected ApplicationContext myAppCtx;

View File

@@ -22,6 +22,7 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.ChunkExecutionDetails;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IJobInstance;
import ca.uhn.fhir.batch2.api.IReductionStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
@@ -29,6 +30,7 @@ import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.ChunkOutcome;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
@@ -51,6 +53,9 @@ public class BulkExportCreateReportStep implements IReductionStepWorker<BulkExpo
@NotNull IJobDataSink<BulkExportJobResults> theDataSink) throws JobExecutionFailedException {
BulkExportJobResults results = new BulkExportJobResults();
String requestUrl = getOriginatingRequestUrl(theStepExecutionDetails, results);
results.setOriginalRequestUrl(requestUrl);
if (myResourceToBinaryIds != null) {
ourLog.info("Bulk Export Report creation step");
@@ -69,6 +74,18 @@ public class BulkExportCreateReportStep implements IReductionStepWorker<BulkExpo
return RunOutcome.SUCCESS;
}
private static String getOriginatingRequestUrl(@NotNull StepExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theStepExecutionDetails, BulkExportJobResults results) {
IJobInstance instance = theStepExecutionDetails.getInstance();
String url = "";
if (instance instanceof JobInstance) {
JobInstance jobInstance = (JobInstance) instance;
BulkExportJobParameters parameters = jobInstance.getParameters(BulkExportJobParameters.class);
String originalRequestUrl = parameters.getOriginalRequestUrl();
url = originalRequestUrl;
}
return url;
}
@NotNull
@Override
public ChunkOutcome consume(ChunkExecutionDetails<BulkExportJobParameters,

View File

@@ -59,7 +59,8 @@ public class BulkExportJobParameters extends BulkExportJobBase {
@JsonProperty("patientIds")
private List<String> myPatientIds;
// Stuff for group export only
@JsonProperty("originalRequestUrl")
private String myOriginalRequestUrl;
/**
* The group id
@@ -134,6 +135,14 @@ public class BulkExportJobParameters extends BulkExportJobBase {
myExpandMdm = theExpandMdm;
}
private void setOriginalRequestUrl(String theOriginalRequestUrl) {
this.myOriginalRequestUrl = theOriginalRequestUrl;
}
public String getOriginalRequestUrl() {
return myOriginalRequestUrl;
}
public static BulkExportJobParameters createFromExportJobParameters(BulkExportParameters theParameters) {
BulkExportJobParameters params = new BulkExportJobParameters();
params.setResourceTypes(theParameters.getResourceTypes());
@@ -144,6 +153,8 @@ public class BulkExportJobParameters extends BulkExportJobBase {
params.setStartDate(theParameters.getStartDate());
params.setExpandMdm(theParameters.isExpandMdm());
params.setPatientIds(theParameters.getPatientIds());
params.setOriginalRequestUrl(theParameters.getOriginalRequestUrl());
return params;
}
}

View File

@@ -35,6 +35,9 @@ public class BulkExportJobResults implements IModelJson {
@JsonProperty("reportMessage")
private String myReportMsg;
@JsonProperty("originalRequestUrl")
private String myOriginalRequestUrl;
public BulkExportJobResults() {
}
@@ -45,6 +48,14 @@ public class BulkExportJobResults implements IModelJson {
return myResourceTypeToBinaryIds;
}
public String getOriginalRequestUrl() {
return myOriginalRequestUrl;
}
public void setOriginalRequestUrl(String theOriginalRequestUrl) {
myOriginalRequestUrl = theOriginalRequestUrl;
}
public void setResourceTypeToBinaryIds(Map<String, List<String>> theResourceTypeToBinaryIds) {
myResourceTypeToBinaryIds = theResourceTypeToBinaryIds;
}

View File

@@ -70,11 +70,17 @@ public class BulkExportParameters extends Batch2BaseJobParameters {
*/
private boolean myExpandMdm;
/**
* Patient id(s)
*/
private List<String> myPatientIds;
/**
* The URL of the original request which initiated the export job.
*/
private String myOriginalRequestUrl;
public boolean isExpandMdm() {
return myExpandMdm;
}
@@ -145,4 +151,12 @@ public class BulkExportParameters extends Batch2BaseJobParameters {
public void setPatientIds(List<String> thePatientIds) {
myPatientIds = thePatientIds;
}
public String getOriginalRequestUrl() {
return myOriginalRequestUrl;
}
public void setOriginalRequestUrl(String theOriginalRequestUrl) {
myOriginalRequestUrl = theOriginalRequestUrl;
}
}

View File

@@ -123,9 +123,14 @@ public class BulkDataExportProvider {
// get cache boolean
boolean useCache = shouldUseCache(theRequestDetails);
BulkExportParameters parameters = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
parameters.setUseExistingJobsFirst(useCache);
// Set the original request URL as part of the job information, as it is used by the poll-status endpoint and is needed for the report.
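// (For traceability: this URL travels with the job as BulkExportParameters.originalRequestUrl,
// is copied into BulkExportJobParameters by createFromExportJobParameters, written into
// BulkExportJobResults by BulkExportCreateReportStep, and finally echoed back as the
// "request" attribute of the poll-status response by this provider.)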
parameters.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
// start job
Batch2JobStartResponse response = myJobRunner.startNewJob(parameters);
@@ -282,6 +287,9 @@ public class BulkDataExportProvider {
BulkExportResponseJson bulkResponseDocument = new BulkExportResponseJson();
bulkResponseDocument.setTransactionTime(info.getEndTime()); // completed
bulkResponseDocument.setRequiresAccessToken(true);
String report = info.getReport();
if (isEmpty(report)) {
// this should never happen, but just in case...
@@ -289,9 +297,8 @@ public class BulkDataExportProvider {
response.getWriter().close();
} else {
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
// if there is a message....
bulkResponseDocument.setMsg(results.getReportMsg());
bulkResponseDocument.setRequest(results.getOriginalRequestUrl());
String serverBase = getDefaultPartitionServerBase(theRequestDetails);