Merge pull request #1747 from jamesagnew/ks-20200303-bulk-export-filter
fixed a copy/paste error in bulk export with test
Commit: 9b37ba35d0
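The copy/paste error: when export filters were supplied, they were appended to the export request string under JpaConstants.PARAM_EXPORT_TYPE instead of JpaConstants.PARAM_EXPORT_TYPE_FILTER, so the filter values landed in a second _type parameter. A minimal standalone sketch of the before/after request strings (plain Java, not HAPI code; the parameter names _type and _typeFilter are taken from the updated test assertion below):

import java.util.List;

public class BulkExportRequestSketch {
	public static void main(String[] args) {
		List<String> filters = List.of("Patient?gender=female");
		String base = "/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient";

		// Before the fix: the copy/pasted constant re-used the "_type" parameter name.
		String buggy = base + "&_type=" + String.join(",", filters);

		// After the fix: the filters travel in "_typeFilter", as the test now asserts.
		String fixed = base + "&_typeFilter=" + String.join(",", filters);

		System.out.println(buggy);
		System.out.println(fixed);
	}
}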
@@ -347,14 +347,14 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 			requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString());
 		}
 		if (theFilters != null && theFilters.size() > 0) {
-			requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", theFilters));
+			requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(String.join(",", theFilters));
 		}
 		String request = requestBuilder.toString();

 		Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
 		Pageable page = PageRequest.of(0, 10);
 		Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
-		if (existing.isEmpty() == false) {
+		if (!existing.isEmpty()) {
 			return toSubmittedJobInfo(existing.iterator().next());
 		}

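For context, the surrounding logic in this hunk reuses a recently submitted export job when an identical request string arrives within the reuse window (the real code queries myBulkExportJobDao.findExistingJob against a cutoff date). A hedged, self-contained sketch of that pattern, using hypothetical names and an in-memory store rather than HAPI's API:

// Hedged sketch of the job-reuse pattern; the class, field, and method names are hypothetical.
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class ExportJobReuseSketch {

	// Hypothetical store: normalized request string -> time the job was submitted.
	private final Map<String, Date> jobsByRequest = new ConcurrentHashMap<>();

	// Assumed reuse window; the real service reads this from myReuseBulkExportForMillis.
	private final long reuseForMillis = 60_000L;

	String submit(String request) {
		Date cutoff = new Date(System.currentTimeMillis() - reuseForMillis);
		Date submittedAt = jobsByRequest.get(request);
		if (submittedAt != null && submittedAt.after(cutoff)) {
			// Analogous to returning toSubmittedJobInfo(existing.iterator().next()).
			return "reused job for: " + request;
		}
		jobsByRequest.put(request, new Date());
		return "new job for: " + request;
	}
}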
@@ -9,7 +9,6 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
 import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.test.utilities.UnregisterScheduledProcessor;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Sets;
 import org.apache.commons.lang3.time.DateUtils;
@@ -23,7 +22,6 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.test.context.TestPropertySource;

 import java.util.Date;
 import java.util.UUID;
@@ -34,6 +32,7 @@ import static org.junit.Assert.*;
 public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {

 	private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class);
+	public static final String TEST_FILTER = "Patient?gender=female";
 	@Autowired
 	private IBulkExportJobDao myBulkExportJobDao;
 	@Autowired
@@ -134,13 +133,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 		createResources();

 		// Create a bulk job
-		IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, null);
+		IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(null, Sets.newHashSet("Patient", "Observation"), null, Sets.newHashSet(TEST_FILTER));
 		assertNotNull(jobDetails.getJobId());

 		// Check the status
 		IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobStatusOrThrowResourceNotFound(jobDetails.getJobId());
 		assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus());
-		assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient", status.getRequest());
+		assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter="+TEST_FILTER, status.getRequest());

 		// Run a scheduled pass to build the export
 		myBulkDataExportSvc.buildExportFiles();