Bulk export partitioning fixes (#5039)
* Work
* Bulk export partitioning fixes
* Changelog fixes
* Cleanup
* Compile fixes
* Test fix
* Add JIRA note
* Version bump
* Drop unneeded tag
Parent: ed04dd2498
Commit: 6e0651e261
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1101,10 +1101,47 @@ public enum Pointcut implements IPointcut {
 	 */
 	STORAGE_INITIATE_BULK_EXPORT(
 		void.class,
-		"ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions",
+		"ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
 		"ca.uhn.fhir.rest.api.server.RequestDetails",
 		"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"
 	),
 
+	/**
+	 * <b>Storage Hook:</b>
+	 * Invoked when a Bulk Export job is being processed. If any hook method is registered
+	 * for this pointcut, the hook method will be called once for each resource that is
+	 * loaded for inclusion in a bulk export file. Hook methods may modify
+	 * the resource object, and this modification will affect the copy that is stored in the
+	 * bulk export data file (but will not affect the original). Hook methods may also
+	 * return <code>false</code> in order to request that the resource be filtered
+	 * from the export.
+	 * <p>
+	 * Hooks may accept the following parameters:
+	 * </p>
+	 * <ul>
+	 * <li>
+	 * ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters - The details of the job being kicked off
+	 * </li>
+	 * <li>
+	 * org.hl7.fhir.instance.model.api.IBaseResource - The resource that will be included in the file
+	 * </li>
+	 * </ul>
+	 * <p>
+	 * Hook methods may return <code>false</code> to indicate that the resource should be
+	 * filtered out. Otherwise, hook methods should return <code>true</code>.
+	 * </p>
+	 *
+	 * @since 6.8.0
+	 */
+	STORAGE_BULK_EXPORT_RESOURCE_INCLUSION(
+		boolean.class,
+		"ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters",
+		"org.hl7.fhir.instance.model.api.IBaseResource"
+	),
+
 	/**
 	 * <b>Storage Hook:</b>
 	 * Invoked when a set of resources are about to be deleted and expunged via url like http://localhost/Patient?active=false&_expunge=true
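To make the new pointcut concrete, a hook registration might look like the following. This is a minimal sketch: the interceptor class and its tag-based filtering policy are hypothetical, but the hook signature follows the pointcut declaration above (a boolean return, with BulkExportJobParameters and IBaseResource parameters).

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;

@Interceptor
public class BulkExportInclusionInterceptor {

	/**
	 * Called once per resource loaded for a bulk export file. Returning
	 * false filters the resource out of the export; returning true keeps it.
	 */
	@Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
	public boolean bulkExportResourceInclusion(BulkExportJobParameters theParameters, IBaseResource theResource) {
		// Hypothetical policy: drop resources carrying a "test-data" tag
		boolean isTestData = theResource.getMeta().getTag().stream()
			.anyMatch(t -> "http://example.org/tags".equals(t.getSystem())
				&& "test-data".equals(t.getCode()));
		return !isTestData;
	}
}

Registering an instance of such a class with the server's interceptor registry is all that is needed; the bulk export job will then consult it for every candidate resource.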
@@ -4,7 +4,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>6.7.10-SNAPSHOT</version>
+	<version>6.7.11-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -0,0 +1,6 @@
+---
+type: change
+issue: 5039
+title: "A new interceptor pointcut called `STORAGE_BULK_EXPORT_RESOURCE_INCLUSION` has been added. This
+  pointcut is called for every resource that will be included in a bulk export data file. Hook methods
+  may modify the resources or signal that they should be filtered out."
@@ -0,0 +1,5 @@
+---
+type: change
+issue: 5039
+title: "Bulk export fetch operations now properly account for the partition being searched, which
+  should improve performance in some cases on partitioned systems."
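The fix follows the idiom visible in JpaBulkExportProcessor below: resource-ID fetches run inside a transaction explicitly scoped to the partition carried by the job parameters, rather than an unscoped one. A minimal sketch of the pattern (the service and parameter names are taken from this commit's diff; the query body is elided):

// Assumes an injected IHapiTransactionService (myHapiTransactionService)
// and the job's ExportPIDIteratorParameters (theParams).
return myHapiTransactionService
		.withSystemRequest()
		.withRequestPartitionId(theParams.getPartitionIdOrAllPartitions())
		.readOnly()
		.execute(() -> {
			// ... run the PID queries; they now see only the requested partition
		});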
@@ -0,0 +1,7 @@
+---
+type: change
+issue: 5039
+title: "The class `BulkDataExportOptions` has been removed and replaced with an equivalent class called
+  `BulkExportJobParameters`. Unfortunately this is a breaking change, but it should be easily remedied,
+  as the replacement class has a very similar signature. This change removes a large amount of duplication
+  in the bulk export codebase and should make for more maintainable code in the future."
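For most callers the migration is a rename plus one representational change: group IDs are now plain strings rather than IdType instances. A before/after sketch (the option values are illustrative, drawn from the test changes in this commit):

// Before (BulkDataExportOptions, removed):
// BulkDataExportOptions options = new BulkDataExportOptions();
// options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
// options.setGroupId(new IdType("Group", "G1"));

// After (BulkExportJobParameters):
BulkExportJobParameters options = new BulkExportJobParameters();
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
options.setGroupId("Group/G1"); // group IDs are now plain strings
options.setResourceTypes(Set.of("Patient", "Observation", "Group"));
options.setOutputFormat(Constants.CT_FHIR_NDJSON);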
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * hapi-fhir-jpa
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.nickname;
 
 import org.springframework.core.io.Resource;
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -48,7 +48,7 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple;
 import ca.uhn.fhir.model.api.Include;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import ca.uhn.fhir.rest.param.HasOrListParam;
 import ca.uhn.fhir.rest.param.HasParam;
 import ca.uhn.fhir.rest.param.ReferenceOrListParam;

@@ -125,7 +125,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
 	@Override
 	public Iterator<JpaPid> getResourcePidIterator(ExportPIDIteratorParameters theParams) {
 		return myHapiTransactionService
-				.withRequest(null)
+				.withSystemRequest()
+				.withRequestPartitionId(theParams.getPartitionIdOrAllPartitions())
 				.readOnly()
 				.execute(() -> {
 					String resourceType = theParams.getResourceType();

@@ -134,9 +135,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
 			RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType);
 
 			LinkedHashSet<JpaPid> pids;
-			if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.PATIENT) {
+			if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.PATIENT) {
 				pids = getPidsForPatientStyleExport(theParams, resourceType, jobId, chunkId, def);
-			} else if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.GROUP) {
+			} else if (theParams.getExportStyle() == BulkExportJobParameters.ExportStyle.GROUP) {
 				pids = getPidsForGroupStyleExport(theParams, resourceType, def);
 			} else {
 				pids = getPidsForSystemStyleExport(theParams, jobId, chunkId, def);
@@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchUrlJobMaintenanceSvc;
 import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
 import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
 import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
@@ -1,39 +0,0 @@
-/*
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.jpa.dao.data;
-
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
-import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
-import org.springframework.data.domain.Pageable;
-import org.springframework.data.domain.Slice;
-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.data.jpa.repository.Modifying;
-import org.springframework.data.jpa.repository.Query;
-import org.springframework.data.repository.query.Param;
-
-import java.util.Date;
-import java.util.Optional;
-
-public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Long>, IHapiFhirJpaRepository {
-
-	@Modifying
-	@Query("DELETE FROM BulkExportJobEntity t WHERE t.myId = :pid")
-	void deleteByPid(@Param("pid") Long theId);
-}
@@ -19,8 +19,6 @@
  */
 package ca.uhn.fhir.jpa.entity;
 
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 import org.hl7.fhir.r5.model.InstantType;

@@ -80,9 +78,8 @@ public class BulkExportJobEntity implements Serializable {
 	@Column(name = JOB_ID, length = UUID_LENGTH, nullable = false)
 	private String myJobId;
 
-	@Enumerated(EnumType.STRING)
 	@Column(name = "JOB_STATUS", length = 10, nullable = false)
-	private BulkExportJobStatusEnum myStatus;
+	private String myStatus;
 	@Temporal(TemporalType.TIMESTAMP)
 	@Column(name = "CREATED_TIME", nullable = false)
 	private Date myCreated;

@@ -168,11 +165,11 @@ public class BulkExportJobEntity implements Serializable {
 		return b.toString();
 	}
 
-	public BulkExportJobStatusEnum getStatus() {
+	public String getStatus() {
 		return myStatus;
 	}
 
-	public void setStatus(BulkExportJobStatusEnum theStatus) {
+	public void setStatus(String theStatus) {
 		if (myStatus != theStatus) {
 			myStatusTime = new Date();
 			myStatus = theStatus;
@@ -19,8 +19,8 @@
  */
 package ca.uhn.fhir.jpa.packages;
 
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -32,25 +32,25 @@ import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.SortOrderEnum;
 import ca.uhn.fhir.rest.api.SortSpec;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
 import ca.uhn.fhir.rest.param.DateRangeParam;
 import ca.uhn.fhir.util.DateRangeUtil;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Pageable;
 import org.springframework.data.domain.Slice;
 import org.springframework.transaction.annotation.Transactional;
 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.Date;
 import java.util.List;
 import java.util.stream.Collectors;
 
 import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
 
 public class Batch2DaoSvcImpl implements IBatch2DaoSvc {

@@ -80,7 +80,7 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {
 		return myTransactionService
 				.withSystemRequest()
 				.withRequestPartitionId(theRequestPartitionId)
-				.execute(()->{
+				.execute(() -> {
 					if (theUrl == null) {
 						return fetchResourceIdsPageNoUrl(theStart, theEnd, thePageSize, theRequestPartitionId);
 					} else {
@@ -25,7 +25,7 @@ import ca.uhn.fhir.mdm.model.MdmPidTuple;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.Group;

@@ -145,11 +145,11 @@ public class JpaBulkExportProcessorTest {
 	@InjectMocks
 	private JpaBulkExportProcessor myProcessor;
 
-	private ExportPIDIteratorParameters createExportParameters(BulkDataExportOptions.ExportStyle theExportStyle) {
+	private ExportPIDIteratorParameters createExportParameters(BulkExportJobParameters.ExportStyle theExportStyle) {
 		ExportPIDIteratorParameters parameters = new ExportPIDIteratorParameters();
 		parameters.setInstanceId("instanceId");
 		parameters.setExportStyle(theExportStyle);
-		if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
+		if (theExportStyle == BulkExportJobParameters.ExportStyle.GROUP) {
 			parameters.setGroupId("123");
 		}
 		parameters.setStartDate(new Date());

@@ -176,7 +176,7 @@ public class JpaBulkExportProcessorTest {
 	@ValueSource(booleans = {true, false})
 	public void getResourcePidIterator_paramsWithPatientExportStyle_returnsAnIterator(boolean thePartitioned) {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.PATIENT);
 		parameters.setResourceType("Patient");
 
 		parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));

@@ -242,7 +242,7 @@ public class JpaBulkExportProcessorTest {
 	@Test
 	public void getResourcePidIterator_patientStyleWithIndexMissingFieldsDisabled_throws() {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.PATIENT);
 		parameters.setResourceType("Patient");
 
 		// when

@@ -262,7 +262,7 @@ public class JpaBulkExportProcessorTest {
 	@CsvSource({"false, false", "false, true", "true, true", "true, false"})
 	public void getResourcePidIterator_groupExportStyleWithPatientResource_returnsIterator(boolean theMdm, boolean thePartitioned) {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
 		parameters.setResourceType("Patient");
 
 		JpaPid groupId = JpaPid.fromId(Long.parseLong(parameters.getGroupId()));

@@ -370,7 +370,7 @@ public class JpaBulkExportProcessorTest {
 	@SuppressWarnings({"rawtypes", "unchecked"})
 	public void getResourcePidIterator_groupExportStyleWithNonPatientResource_returnsIterator(boolean theMdm, boolean thePartitioned) {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
 		parameters.setResourceType("Observation");
 
 		JpaPid groupId = JpaPid.fromId(Long.parseLong(parameters.getGroupId()));

@@ -484,7 +484,7 @@ public class JpaBulkExportProcessorTest {
 	@ValueSource(booleans = {true, false})
 	public void getResourcePidIterator_systemExport_returnsIterator(boolean thePartitioned) {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.SYSTEM);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.SYSTEM);
 		parameters.setResourceType("Patient");
 
 		parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));

@@ -540,7 +540,7 @@ public class JpaBulkExportProcessorTest {
 	@ValueSource(booleans = {true, false})
 	public void getResourcePidIterator_groupExportStyleWithGroupResource_returnsAnIterator(boolean thePartitioned) {
 		// setup
-		ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
+		ExportPIDIteratorParameters parameters = createExportParameters(BulkExportJobParameters.ExportStyle.GROUP);
 		parameters.setResourceType("Group");
 
 		parameters.setPartitionId(getPartitionIdFromParams(thePartitioned));
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1,19 +1,19 @@
 package ca.uhn.fhir.jpa.bulk;
 
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.jpa.test.BaseJpaTest;
 import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
 import ca.uhn.fhir.jpa.test.config.TestR4Config;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
 import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import ca.uhn.fhir.util.JsonUtil;
 import org.hl7.fhir.r4.model.Bundle;
 import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.Meta;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

@@ -45,12 +45,13 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
 public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest {
 
 	private final FhirContext myCtx = FhirContext.forR4Cached();
-	@Autowired private IBatch2JobRunner myJobRunner;
 	@Autowired private PlatformTransactionManager myTxManager;
 	@Autowired
 	@Qualifier("mySystemDaoR4")
 	protected IFhirSystemDao<Bundle, Meta> mySystemDao;
 
+	@Autowired
+	private IJobCoordinator myJobCoordinator;
+
 	@BeforeEach
 	void setUp() {

@@ -72,28 +73,32 @@ public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest
 		mySystemDao.transaction(mySrd, inputBundle);
 
 		// set the export options
-		BulkDataExportOptions options = new BulkDataExportOptions();
+		BulkExportJobParameters options = new BulkExportJobParameters();
 		options.setResourceTypes(Set.of("Patient", "Observation", "Group"));
-		options.setGroupId(new IdType("Group", "G1"));
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+		options.setGroupId("Group/G1");
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
 		options.setOutputFormat(Constants.CT_FHIR_NDJSON);
 
 		BulkExportJobResults jobResults = getBulkExportJobResults(options);
 		assertThat(jobResults.getResourceTypeToBinaryIds().keySet(), containsInAnyOrder("Patient", "Observation", "Group"));
 	}
 
-	private BulkExportJobResults getBulkExportJobResults(BulkDataExportOptions theOptions) {
-		Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
+	private BulkExportJobResults getBulkExportJobResults(BulkExportJobParameters theOptions) {
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+		startRequest.setParameters(theOptions);
+
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
 
 		assertNotNull(startResponse);
 
 		// Run a scheduled pass to build the export
 		myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
 
-		await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
+		await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
 
 		// Iterate over the files
-		String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
+		String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
 		return JsonUtil.deserialize(report, BulkExportJobResults.class);
 	}
@@ -3,7 +3,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1,17 +1,17 @@
 package ca.uhn.fhir.jpa.batch2;
 
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
 import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
 import ca.uhn.fhir.jpa.test.config.TestR4Config;
-import ca.uhn.fhir.jpa.util.BulkExportUtils;
 import ca.uhn.fhir.rest.api.Constants;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import ca.uhn.fhir.util.JsonUtil;
 import com.google.common.collect.Sets;
 import org.hl7.fhir.r4.model.Binary;

@@ -34,7 +34,6 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ExecutionException;

@@ -61,7 +60,7 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
 	private static final Logger ourLog = LoggerFactory.getLogger(BulkDataErrorAbuseTest.class);
 
 	@Autowired
-	private IBatch2JobRunner myJobRunner;
+	private IJobCoordinator myJobCoordinator;
 
 	@BeforeEach
 	void beforeEach() {

@@ -122,11 +121,11 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
 		myClient.update().resource(group).execute();
 
 		// set the export options
-		BulkDataExportOptions options = new BulkDataExportOptions();
+		BulkExportJobParameters options = new BulkExportJobParameters();
 		options.setResourceTypes(Sets.newHashSet("Patient"));
-		options.setGroupId(new IdType("Group", "G2"));
+		options.setGroupId("Group/G2");
 		options.setFilters(new HashSet<>());
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
 		options.setOutputFormat(Constants.CT_FHIR_NDJSON);
 
 		BlockingQueue<Runnable> workQueue = new LinkedBlockingQueue<>();

@@ -184,7 +183,7 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
 
 	private void verifyBulkExportResults(String theInstanceId, List<String> theContainedList, List<String> theExcludedList) {
 		// Iterate over the files
-		Batch2JobInfo jobInfo = myJobRunner.getJobInfo(theInstanceId);
+		JobInstance jobInfo = myJobCoordinator.getInstance(theInstanceId);
 		String report = jobInfo.getReport();
 		ourLog.debug("Export job {} report: {}", theInstanceId, report);
 		if (!theContainedList.isEmpty()) {

@@ -237,10 +236,12 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
 		ourLog.info("Job {} ok", theInstanceId);
 	}
 
-	private String startJob(BulkDataExportOptions theOptions) {
-		BulkExportParameters startRequest = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
-		startRequest.setUseExistingJobsFirst(false);
-		Batch2JobStartResponse startResponse = myJobRunner.startNewJob(null, startRequest);
+	private String startJob(BulkExportJobParameters theOptions) {
+		JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
+		startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
+		startRequest.setUseCache(false);
+		startRequest.setParameters(theOptions);
+		Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
 		assertNotNull(startResponse);
 		return startResponse.getInstanceId();
 	}
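The same conversion recurs throughout the tests in this commit: instead of going through IBatch2JobRunner and BulkExportUtils, callers build a JobInstanceStartRequest and submit it to IJobCoordinator directly. The recurring pattern, extracted for reference from the diff above:

// theOptions is a BulkExportJobParameters instance
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
startRequest.setUseCache(false); // replaces setUseExistingJobsFirst(false)
startRequest.setParameters(theOptions);
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);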
@@ -1,25 +1,25 @@
 package ca.uhn.fhir.jpa.bulk;
 
+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.JobOperationResultJson;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
+import ca.uhn.fhir.batch2.model.StatusEnum;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
-import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
 import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
-import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import ca.uhn.fhir.rest.client.apache.ResourceEntity;
 import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy;
 import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
@@ -66,15 +66,16 @@ import java.util.Set;
 import java.util.stream.Stream;
 
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
+import static org.junit.jupiter.api.Assertions.assertAll;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;

@@ -82,7 +83,6 @@ import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.ArgumentMatchers.isNotNull;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.lenient;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -98,10 +98,13 @@ public class BulkDataExportProviderTest {
 	private final FhirContext myCtx = FhirContext.forR4Cached();
 	@RegisterExtension
 	private final HttpClientExtension myClient = new HttpClientExtension();
-	@Mock
-	private IBatch2JobRunner myJobRunner;
+	private final RequestPartitionId myRequestPartitionId = RequestPartitionId.fromPartitionIdAndName(123, "Partition-A");
+	private final String myPartitionName = "Partition-A";
+	private final String myFixedBaseUrl = "http:/myfixedbaseurl.com";
 	@Mock
 	IFhirResourceDao myFhirResourceDao;
+	@Mock
+	IJobCoordinator myJobCoordinator;
 	@InjectMocks
 	private BulkDataExportProvider myProvider;
 	@RegisterExtension
@@ -109,44 +112,18 @@ public class BulkDataExportProviderTest {
 		.withServer(s -> s.registerProvider(myProvider));
 	@Spy
 	private RequestPartitionHelperSvc myRequestPartitionHelperSvc = new MyRequestPartitionHelperSvc();
 
 	private JpaStorageSettings myStorageSettings;
 
-	private final RequestPartitionId myRequestPartitionId = RequestPartitionId.fromPartitionIdAndName(123, "Partition-A");
-
-	private final String myPartitionName = "Partition-A";
-
-	private final String myFixedBaseUrl = "http:/myfixedbaseurl.com";
-
-	private class MyRequestPartitionHelperSvc extends RequestPartitionHelperSvc {
-		@Override
-		public @NotNull RequestPartitionId determineReadPartitionForRequest(RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
-			assert theRequest != null;
-			if (myPartitionName.equals(theRequest.getTenantId())) {
-				return myRequestPartitionId;
-			} else {
-				return RequestPartitionId.fromPartitionName(theRequest.getTenantId());
-			}
-		}
-
-		@Override
-		public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
-			if (!myPartitionName.equals(theRequest.getTenantId()) && theRequest.getTenantId() != null) {
-				throw new ForbiddenOperationException("User does not have access to resources on the requested partition");
-			}
-		}
-
-	}
+	@Mock
+	private DaoRegistry myDaoRegistry;
 
 	@BeforeEach
 	public void injectStorageSettings() {
 		myStorageSettings = new JpaStorageSettings();
 		myProvider.setStorageSettings(myStorageSettings);
-		DaoRegistry daoRegistry = mock(DaoRegistry.class);
-		lenient().when(daoRegistry.getRegisteredDaoTypes()).thenReturn(Set.of("Patient", "Observation", "Encounter"));
+		lenient().when(myDaoRegistry.getRegisteredDaoTypes()).thenReturn(Set.of("Patient", "Observation", "Encounter", "Group", "Device", "DiagnosticReport"));
 
-		lenient().when(daoRegistry.getResourceDao(anyString())).thenReturn(myFhirResourceDao);
-		myProvider.setDaoRegistry(daoRegistry);
+		lenient().when(myDaoRegistry.getResourceDao(anyString())).thenReturn(myFhirResourceDao);
+		myProvider.setDaoRegistry(myDaoRegistry);
 	}
@@ -159,12 +136,15 @@ public class BulkDataExportProviderTest {
 		myServer.getRestfulServer().setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());
 	}
 
-	private BulkExportParameters verifyJobStart() {
-		ArgumentCaptor<Batch2BaseJobParameters> startJobCaptor = ArgumentCaptor.forClass(Batch2BaseJobParameters.class);
-		verify(myJobRunner).startNewJob(isNotNull(), startJobCaptor.capture());
-		Batch2BaseJobParameters sp = startJobCaptor.getValue();
-		assertTrue(sp instanceof BulkExportParameters);
-		return (BulkExportParameters) sp;
+	private JobInstanceStartRequest verifyJobStart() {
+		ArgumentCaptor<JobInstanceStartRequest> startJobCaptor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
+		verify(myJobCoordinator).startInstance(isNotNull(), startJobCaptor.capture());
+		JobInstanceStartRequest sp = startJobCaptor.getValue();
+		return sp;
+	}
+
+	private BulkExportJobParameters verifyJobStartAndReturnParameters() {
+		return verifyJobStart().getParameters(BulkExportJobParameters.class);
 	}
 
 	private Batch2JobStartResponse createJobStartResponse(String theJobId) {
@@ -198,8 +178,7 @@ public class BulkDataExportProviderTest {
 		String practitionerResource = "Practitioner";
 		String filter = "Patient?identifier=foo";
 		String postFetchFilter = "Patient?_tag=foo";
-		when(myJobRunner.startNewJob(isNotNull(), any()))
-			.thenReturn(createJobStartResponse());
+		when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
 
 		InstantType now = InstantType.now();
 

@@ -229,7 +208,7 @@ public class BulkDataExportProviderTest {
 			baseUrl = myServer.getBaseUrl();
 		}
 
-		if(partitioningEnabled) {
+		if (partitioningEnabled) {
 			baseUrl = baseUrl + "/" + myPartitionName;
 		}
 

@@ -238,7 +217,7 @@ public class BulkDataExportProviderTest {
 			assertEquals(baseUrl + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 		}
 
-		BulkExportParameters params = verifyJobStart();
+		BulkExportJobParameters params = verifyJobStartAndReturnParameters();
 		assertEquals(2, params.getResourceTypes().size());
 		assertTrue(params.getResourceTypes().contains(patientResource));
 		assertTrue(params.getResourceTypes().contains(practitionerResource));
@@ -250,7 +229,7 @@ public class BulkDataExportProviderTest {
 
 	@Test
 	public void testOmittingOutputFormatDefaultsToNdjson() throws IOException {
-		when(myJobRunner.startNewJob(isNotNull(), any()))
+		when(myJobCoordinator.startInstance(isNotNull(), any()))
 			.thenReturn(createJobStartResponse());
 
 		Parameters input = new Parameters();

@@ -262,7 +241,7 @@ public class BulkDataExportProviderTest {
 			assertEquals(202, response.getStatusLine().getStatusCode());
 		}
 
-		BulkExportParameters params = verifyJobStart();
+		BulkExportJobParameters params = verifyJobStartAndReturnParameters();
 		assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
 

@@ -271,7 +250,7 @@ public class BulkDataExportProviderTest {
 	@ParameterizedTest
 	@MethodSource("paramsProvider")
 	public void testSuccessfulInitiateBulkRequest_GetWithPartitioning(boolean partitioningEnabled) throws IOException {
-		when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());
+		when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
 
 		InstantType now = InstantType.now();
 
@@ -299,7 +278,7 @@ public class BulkDataExportProviderTest {
 			assertEquals(myBaseUrl + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 		}
 
-		BulkExportParameters params = verifyJobStart();
+		BulkExportJobParameters params = verifyJobStartAndReturnParameters();
 		assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
 		assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
 		assertThat(params.getSince(), notNullValue());

@@ -308,7 +287,7 @@ public class BulkDataExportProviderTest {
 
 	@Test
 	public void testSuccessfulInitiateBulkRequest_Get_MultipleTypeFilters() throws IOException {
-		when(myJobRunner.startNewJob(isNotNull(), any()))
+		when(myJobCoordinator.startInstance(isNotNull(), any()))
 			.thenReturn(createJobStartResponse());
 
 		String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT

@@ -328,7 +307,7 @@ public class BulkDataExportProviderTest {
 			assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 		}
 
-		BulkExportParameters params = verifyJobStart();
+		BulkExportJobParameters params = verifyJobStartAndReturnParameters();
 		assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
 		assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "EpisodeOfCare"));
 		assertThat(params.getSince(), nullValue());
@@ -338,13 +317,16 @@ public class BulkDataExportProviderTest {
 	@Test
 	public void testPollForStatus_QUEUED() throws IOException {
 		// setup
-		Batch2JobInfo info = new Batch2JobInfo();
-		info.setJobId(A_JOB_ID);
-		info.setStatus(BulkExportJobStatusEnum.BUILDING);
+		JobInstance info = new JobInstance();
+		info.setInstanceId(A_JOB_ID);
+		info.setStatus(StatusEnum.QUEUED);
 		info.setEndTime(new Date());
 
+		BulkExportJobParameters parameters = new BulkExportJobParameters();
+		info.setParameters(parameters);
+
 		// when
-		when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+		when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
 			.thenReturn(info);
 
 		// test

@@ -364,16 +346,19 @@ public class BulkDataExportProviderTest {
 	}
 
 	@Test
-	public void testPollForStatus_ERROR() throws IOException {
+	public void testPollForStatus_Failed() throws IOException {
 		// setup
-		Batch2JobInfo info = new Batch2JobInfo();
-		info.setJobId(A_JOB_ID);
-		info.setStatus(BulkExportJobStatusEnum.ERROR);
+		JobInstance info = new JobInstance();
+		info.setInstanceId(A_JOB_ID);
+		info.setStatus(StatusEnum.FAILED);
 		info.setStartTime(new Date());
-		info.setErrorMsg("Some Error Message");
+		info.setErrorMessage("Some Error Message");
+
+		BulkExportJobParameters parameters = new BulkExportJobParameters();
+		info.setParameters(parameters);
 
 		// when
-		when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+		when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
 			.thenReturn(info);
 
 		// call
@@ -410,9 +395,9 @@ public class BulkDataExportProviderTest {
 			myBaseUriForExport = myServer.getBaseUrl();
 		}
 
-		Batch2JobInfo info = new Batch2JobInfo();
-		info.setJobId(A_JOB_ID);
-		info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+		JobInstance info = new JobInstance();
+		info.setInstanceId(A_JOB_ID);
+		info.setStatus(StatusEnum.COMPLETED);
 		info.setEndTime(InstantType.now().getValue());
 		ArrayList<String> ids = new ArrayList<>();
 		ids.add(new IdType("Binary/111").getValueAsString());

@@ -424,12 +409,15 @@ public class BulkDataExportProviderTest {
 		map.put("Patient", ids);
 		results.setResourceTypeToBinaryIds(map);
 		info.setReport(JsonUtil.serialize(results));
+
+		BulkExportJobParameters parameters = new BulkExportJobParameters();
 		if (partitioningEnabled) {
-			info.setRequestPartitionId(myRequestPartitionId);
+			parameters.setPartitionId(myRequestPartitionId);
 		}
+		info.setParameters(parameters);
 
 		// when
-		when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+		when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
 			.thenReturn(info);
 
 		// call

@@ -450,7 +438,7 @@ public class BulkDataExportProviderTest {
 		}
 		if (partitioningEnabled) {
 			// If partitioning is enabled, then the URLs in the poll response should also have the partition name.
-			myBaseUriForPoll = myBaseUriForPoll + "/"+ myPartitionName;
+			myBaseUriForPoll = myBaseUriForPoll + "/" + myPartitionName;
 		}
 
 		assertEquals(200, response.getStatusLine().getStatusCode());
@@ -476,11 +464,15 @@ public class BulkDataExportProviderTest {
 		// setup
 		enablePartitioning();
 
-		Batch2JobInfo info = new Batch2JobInfo();
-		info.setJobId(A_JOB_ID);
-		info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+		JobInstance info = new JobInstance();
+		info.setInstanceId(A_JOB_ID);
+		info.setStatus(StatusEnum.COMPLETED);
 		info.setEndTime(InstantType.now().getValue());
-		info.setRequestPartitionId(myRequestPartitionId);
+
+		BulkExportJobParameters parameters = new BulkExportJobParameters();
+		parameters.setPartitionId(myRequestPartitionId);
+		info.setParameters(parameters);
 
 		ArrayList<String> ids = new ArrayList<>();
 		ids.add(new IdType("Binary/111").getValueAsString());
 		ids.add(new IdType("Binary/222").getValueAsString());

@@ -493,7 +485,7 @@ public class BulkDataExportProviderTest {
 		info.setReport(JsonUtil.serialize(results));
 
 		// when
-		when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+		when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
 			.thenReturn(info);
 
 		// call

@@ -514,10 +506,14 @@ public class BulkDataExportProviderTest {
 	public void testExportWhenNoResourcesReturned() throws IOException {
 		// setup
 		String msg = "Some msg";
-		Batch2JobInfo info = new Batch2JobInfo();
-		info.setJobId(A_JOB_ID);
-		info.setStatus(BulkExportJobStatusEnum.COMPLETE);
+		JobInstance info = new JobInstance();
+		info.setInstanceId(A_JOB_ID);
+		info.setStatus(StatusEnum.COMPLETED);
 		info.setEndTime(InstantType.now().getValue());
 
+		BulkExportJobParameters parameters = new BulkExportJobParameters();
+		info.setParameters(parameters);
+
 		ArrayList<String> ids = new ArrayList<>();
 		BulkExportJobResults results = new BulkExportJobResults();
 		HashMap<String, List<String>> map = new HashMap<>();
@@ -527,7 +523,7 @@ public class BulkDataExportProviderTest {
 		info.setReport(JsonUtil.serialize(results));
 
 		// when
-		when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
+		when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
 			.thenReturn(info);
 
 		// test

@@ -554,7 +550,7 @@ public class BulkDataExportProviderTest {
 		// setup
 
 		// when
-		when(myJobRunner.getJobInfo(anyString()))
+		when(myJobCoordinator.getInstance(anyString()))
 			.thenThrow(new ResourceNotFoundException("Unknown job: AAA"));
 
 		String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +

@@ -584,7 +580,7 @@ public class BulkDataExportProviderTest {
 	@Test
 	public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
 		// when
-		when(myJobRunner.startNewJob(isNotNull(), any()))
+		when(myJobCoordinator.startInstance(isNotNull(), any()))
 			.thenReturn(createJobStartResponse(G_JOB_ID));
 
 		InstantType now = InstantType.now();
@@ -612,7 +608,7 @@ public class BulkDataExportProviderTest {
 		}
 
 		// verify
-		BulkExportParameters bp = verifyJobStart();
+		BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
 
 		assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
 		assertThat(bp.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));

@@ -625,7 +621,7 @@ public class BulkDataExportProviderTest {
 	@Test
 	public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
 		// when
-		when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
+		when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
 
 		InstantType now = InstantType.now();
 

@@ -648,7 +644,7 @@ public class BulkDataExportProviderTest {
 			assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
 		}
 
-		BulkExportParameters bp = verifyJobStart();
+		BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
 		assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
 		assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
 		assertThat(bp.getSince(), notNullValue());
@@ -657,6 +653,40 @@ public class BulkDataExportProviderTest {
 		assertThat(bp.isExpandMdm(), is(equalTo(true)));
 	}
 
+	@Test
+	public void testSuccessfulInitiateGroupBulkRequest_Get_SomeTypesDisabled() throws IOException {
+		// when
+		when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse(G_JOB_ID));
+
+		InstantType now = InstantType.now();
+
+		String url = myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT
+			+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+			+ "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString());
+
+		// call
+		HttpGet get = new HttpGet(url);
+		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
+		ourLog.info("Request: {}", url);
+		try (CloseableHttpResponse response = myClient.execute(get)) {
+			ourLog.info("Response: {}", response.toString());
+
+			assertEquals(202, response.getStatusLine().getStatusCode());
+			assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
+			assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+		}
+
+		BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
+		assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
+		assertThat(bp.getResourceTypes().toString(), bp.getResourceTypes(), containsInAnyOrder(
+			"DiagnosticReport", "Group", "Observation", "Device", "Patient", "Encounter"
+		));
+		assertThat(bp.getSince(), notNullValue());
+		assertThat(bp.getFilters(), notNullValue());
+		assertEquals(GROUP_ID, bp.getGroupId());
+		assertThat(bp.isExpandMdm(), is(equalTo(false)));
+	}
+
 	@Test
 	public void testInitiateWithGetAndMultipleTypeFilters() throws IOException {
 		// setup
@@ -687,7 +717,7 @@ public class BulkDataExportProviderTest {
 		get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
 		try (CloseableHttpResponse ignored = myClient.execute(get)) {
 			// verify
-			BulkExportParameters bp = verifyJobStart();
+			BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
 			assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
 		}
 	}

@@ -714,8 +744,7 @@ public class BulkDataExportProviderTest {
 	@Test
 	public void testInitiateGroupExportWithNoResourceTypes() throws IOException {
 		// when
-		when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
-			.thenReturn(createJobStartResponse());
+		when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
 
 		// test
 		String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT

@@ -727,12 +756,12 @@ public class BulkDataExportProviderTest {
 
 			// verify
 			assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202)));
-			final BulkExportParameters bulkExportParameters = verifyJobStart();
+			final BulkExportJobParameters BulkExportJobParameters = verifyJobStartAndReturnParameters();
 
 			assertAll(
-				() -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")),
-				() -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")),
-				() -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device"))
+				() -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Patient")),
+				() -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Group")),
+				() -> assertTrue(BulkExportJobParameters.getResourceTypes().contains("Device"))
 			);
 		}
 	}
@ -740,7 +769,7 @@ public class BulkDataExportProviderTest {
|
|||
@Test
|
||||
public void testInitiateWithPostAndMultipleTypeFilters() throws IOException {
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any())).thenReturn(createJobStartResponse());
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any())).thenReturn(createJobStartResponse());
|
||||
|
||||
Parameters input = new Parameters();
|
||||
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
|
||||
|
@ -763,7 +792,7 @@ public class BulkDataExportProviderTest {
|
|||
}
|
||||
|
||||
// verify
|
||||
BulkExportParameters bp = verifyJobStart();
|
||||
BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
|
||||
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
|
||||
assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient"));
|
||||
assertThat(bp.getFilters(), containsInAnyOrder("Patient?gender=male", "Patient?gender=female"));
|
||||
|
@ -772,7 +801,7 @@ public class BulkDataExportProviderTest {
|
|||
@Test
|
||||
public void testInitiateBulkExportOnPatient_noTypeParam_addsTypeBeforeBulkExport() throws IOException {
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any()))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(createJobStartResponse());
|
||||
|
||||
Parameters input = new Parameters();
|
||||
|
@ -790,7 +819,7 @@ public class BulkDataExportProviderTest {
|
|||
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
|
||||
}
|
||||
|
||||
BulkExportParameters bp = verifyJobStart();
|
||||
BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
|
||||
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
|
||||
assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient"));
|
||||
}
|
||||
|
@ -798,7 +827,7 @@ public class BulkDataExportProviderTest {
|
|||
@Test
|
||||
public void testInitiatePatientExportRequest() throws IOException {
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any()))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(createJobStartResponse());
|
||||
|
||||
InstantType now = InstantType.now();
|
||||
|
@ -823,7 +852,7 @@ public class BulkDataExportProviderTest {
|
|||
assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
|
||||
}
|
||||
|
||||
BulkExportParameters bp = verifyJobStart();
|
||||
BulkExportJobParameters bp = verifyJobStartAndReturnParameters();
|
||||
assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
|
||||
assertThat(bp.getResourceTypes(), containsInAnyOrder("Immunization", "Observation"));
|
||||
assertThat(bp.getSince(), notNullValue());
|
||||
|
@ -837,7 +866,7 @@ public class BulkDataExportProviderTest {
|
|||
startResponse.setUsesCachedResult(true);
|
||||
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(startResponse);
|
||||
|
||||
Parameters input = new Parameters();
|
||||
|
@ -858,8 +887,8 @@ public class BulkDataExportProviderTest {
|
|||
}
|
||||
|
||||
// verify
|
||||
BulkExportParameters parameters = verifyJobStart();
|
||||
assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
|
||||
JobInstanceStartRequest parameters = verifyJobStart();
|
||||
assertFalse(parameters.isUseCache());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -871,7 +900,7 @@ public class BulkDataExportProviderTest {
|
|||
myStorageSettings.setEnableBulkExportJobReuse(false);
|
||||
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(startResponse);
|
||||
|
||||
Parameters input = new Parameters();
|
||||
|
@ -891,8 +920,8 @@ public class BulkDataExportProviderTest {
|
|||
}
|
||||
|
||||
// verify
|
||||
BulkExportParameters parameters = verifyJobStart();
|
||||
assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
|
||||
JobInstanceStartRequest parameters = verifyJobStart();
|
||||
assertFalse(parameters.isUseCache());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -901,7 +930,7 @@ public class BulkDataExportProviderTest {
|
|||
Batch2JobStartResponse startResponse = createJobStartResponse();
|
||||
startResponse.setUsesCachedResult(true);
|
||||
startResponse.setInstanceId(A_JOB_ID);
|
||||
when(myJobRunner.startNewJob(isNotNull(), any(Batch2BaseJobParameters.class)))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(startResponse);
|
||||
|
||||
// when
|
||||
|
@ -919,22 +948,26 @@ public class BulkDataExportProviderTest {
|
|||
@MethodSource("paramsProvider")
|
||||
public void testDeleteForOperationPollStatus_SUBMITTED_ShouldCancelJobSuccessfully(boolean partitioningEnabled) throws IOException {
|
||||
// setup
|
||||
Batch2JobInfo info = new Batch2JobInfo();
|
||||
info.setJobId(A_JOB_ID);
|
||||
info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
|
||||
info.setEndTime(InstantType.now().getValue());
|
||||
|
||||
BulkExportJobParameters parameters = new BulkExportJobParameters();
|
||||
if (partitioningEnabled) {
|
||||
info.setRequestPartitionId(myRequestPartitionId);
|
||||
parameters.setPartitionId(myRequestPartitionId);
|
||||
}
|
||||
Batch2JobOperationResult result = new Batch2JobOperationResult();
|
||||
|
||||
JobInstance info = new JobInstance();
|
||||
info.setParameters(parameters);
|
||||
info.setInstanceId(A_JOB_ID);
|
||||
info.setStatus(StatusEnum.QUEUED);
|
||||
info.setEndTime(InstantType.now().getValue());
|
||||
JobOperationResultJson result = new JobOperationResultJson();
|
||||
result.setOperation("Cancel job instance " + A_JOB_ID);
|
||||
result.setMessage("Job instance <" + A_JOB_ID + "> successfully cancelled.");
|
||||
result.setSuccess(true);
|
||||
|
||||
// when
|
||||
when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
|
||||
when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
|
||||
.thenReturn(info);
|
||||
when(myJobRunner.cancelInstance(eq(A_JOB_ID)))
|
||||
when(myJobCoordinator.cancelInstance(eq(A_JOB_ID)))
|
||||
.thenReturn(result);
|
||||
|
||||
// call
|
||||
|
@ -955,7 +988,7 @@ public class BulkDataExportProviderTest {
|
|||
assertEquals(202, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
|
||||
|
||||
verify(myJobRunner, times(1)).cancelInstance(A_JOB_ID);
|
||||
verify(myJobCoordinator, times(1)).cancelInstance(A_JOB_ID);
|
||||
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
|
||||
ourLog.info("Response content: {}", responseContent);
|
||||
assertThat(responseContent, containsString("successfully cancelled."));
|
||||
|
@ -965,13 +998,16 @@ public class BulkDataExportProviderTest {
|
|||
@Test
|
||||
public void testDeleteForOperationPollStatus_COMPLETE_ShouldReturnError() throws IOException {
|
||||
// setup
|
||||
Batch2JobInfo info = new Batch2JobInfo();
|
||||
info.setJobId(A_JOB_ID);
|
||||
info.setStatus(BulkExportJobStatusEnum.COMPLETE);
|
||||
JobInstance info = new JobInstance();
|
||||
info.setInstanceId(A_JOB_ID);
|
||||
info.setStatus(StatusEnum.COMPLETED);
|
||||
info.setEndTime(InstantType.now().getValue());
|
||||
|
||||
BulkExportJobParameters parameters = new BulkExportJobParameters();
|
||||
info.setParameters(parameters);
|
||||
|
||||
// when
|
||||
when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
|
||||
when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
|
||||
.thenReturn(info);
|
||||
|
||||
// call
|
||||
|
@ -984,7 +1020,7 @@ public class BulkDataExportProviderTest {
|
|||
assertEquals(404, response.getStatusLine().getStatusCode());
|
||||
assertEquals("Not Found", response.getStatusLine().getReasonPhrase());
|
||||
|
||||
verify(myJobRunner, times(1)).cancelInstance(A_JOB_ID);
|
||||
verify(myJobCoordinator, times(1)).cancelInstance(A_JOB_ID);
|
||||
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
|
||||
// content would be blank, since the job is cancelled, so no
|
||||
ourLog.info("Response content: {}", responseContent);
|
||||
|
@ -995,7 +1031,7 @@ public class BulkDataExportProviderTest {
|
|||
@Test
|
||||
public void testGetBulkExport_outputFormat_FhirNdJson_inHeader() throws IOException {
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any()))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(createJobStartResponse());
|
||||
|
||||
// call
|
||||
|
@ -1011,14 +1047,14 @@ public class BulkDataExportProviderTest {
|
|||
assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty());
|
||||
}
|
||||
|
||||
final BulkExportParameters params = verifyJobStart();
|
||||
final BulkExportJobParameters params = verifyJobStartAndReturnParameters();
|
||||
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetBulkExport_outputFormat_FhirNdJson_inUrl() throws IOException {
|
||||
// when
|
||||
when(myJobRunner.startNewJob(isNotNull(), any()))
|
||||
when(myJobCoordinator.startInstance(isNotNull(), any()))
|
||||
.thenReturn(createJobStartResponse());
|
||||
|
||||
// call
|
||||
|
@ -1034,7 +1070,7 @@ public class BulkDataExportProviderTest {
|
|||
);
|
||||
}
|
||||
|
||||
final BulkExportParameters params = verifyJobStart();
|
||||
final BulkExportJobParameters params = verifyJobStartAndReturnParameters();
|
||||
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
|
||||
}
|
||||
|
||||
|
@ -1042,6 +1078,8 @@ public class BulkDataExportProviderTest {
|
|||
public void testOperationExportPollStatus_POST_NonExistingId_NotFound() throws IOException {
|
||||
String jobId = "NonExisting-JobId";
|
||||
|
||||
when(myJobCoordinator.getInstance(any())).thenThrow(new ResourceNotFoundException("Unknown"));
|
||||
|
||||
// Create the initial launch Parameters containing the request
|
||||
Parameters input = new Parameters();
|
||||
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(ca.uhn.fhir.rest.api.Constants.CT_FHIR_NDJSON));
|
||||
|
@ -1063,16 +1101,19 @@ public class BulkDataExportProviderTest {
|
|||
@MethodSource("paramsProvider")
|
||||
public void testOperationExportPollStatus_POST_ExistingId_Accepted(boolean partititioningEnabled) throws IOException {
|
||||
// setup
|
||||
Batch2JobInfo info = new Batch2JobInfo();
|
||||
info.setJobId(A_JOB_ID);
|
||||
info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
|
||||
JobInstance info = new JobInstance();
|
||||
info.setInstanceId(A_JOB_ID);
|
||||
info.setStatus(StatusEnum.QUEUED);
|
||||
info.setEndTime(InstantType.now().getValue());
|
||||
if(partititioningEnabled) {
|
||||
info.setRequestPartitionId(myRequestPartitionId);
|
||||
|
||||
BulkExportJobParameters parameters = new BulkExportJobParameters();
|
||||
if (partititioningEnabled) {
|
||||
parameters.setPartitionId(myRequestPartitionId);
|
||||
}
|
||||
info.setParameters(parameters);
|
||||
|
||||
// when
|
||||
when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
|
||||
when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
|
||||
.thenReturn(info);
|
||||
|
||||
// Create the initial launch Parameters containing the request
|
||||
|
@ -1159,14 +1200,17 @@ public class BulkDataExportProviderTest {
|
|||
// setup
|
||||
enablePartitioning();
|
||||
|
||||
Batch2JobInfo info = new Batch2JobInfo();
|
||||
info.setJobId(A_JOB_ID);
|
||||
info.setStatus(BulkExportJobStatusEnum.BUILDING);
|
||||
JobInstance info = new JobInstance();
|
||||
info.setInstanceId(A_JOB_ID);
|
||||
info.setStatus(StatusEnum.IN_PROGRESS);
|
||||
info.setEndTime(new Date());
|
||||
info.setRequestPartitionId(myRequestPartitionId);
|
||||
|
||||
BulkExportJobParameters parameters = new BulkExportJobParameters();
|
||||
parameters.setPartitionId(myRequestPartitionId);
|
||||
info.setParameters(parameters);
|
||||
|
||||
// when
|
||||
when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
|
||||
when(myJobCoordinator.getInstance(eq(A_JOB_ID)))
|
||||
.thenReturn(info);
|
||||
|
||||
// test
|
||||
|
@ -1189,5 +1233,25 @@ public class BulkDataExportProviderTest {
|
|||
);
|
||||
}
|
||||
|
||||
private class MyRequestPartitionHelperSvc extends RequestPartitionHelperSvc {
|
||||
@Override
|
||||
public @NotNull RequestPartitionId determineReadPartitionForRequest(RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
|
||||
assert theRequest != null;
|
||||
if (myPartitionName.equals(theRequest.getTenantId())) {
|
||||
return myRequestPartitionId;
|
||||
} else {
|
||||
return RequestPartitionId.fromPartitionName(theRequest.getTenantId());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
|
||||
if (!myPartitionName.equals(theRequest.getTenantId()) && theRequest.getTenantId() != null) {
|
||||
throw new ForbiddenOperationException("User does not have access to resources on the requested partition");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
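
The recurring change in the test class above is mechanical: calls through the IBatch2JobRunner facade (startNewJob, getJobInfo, cancelInstance) are replaced with direct calls to the batch2 IJobCoordinator (startInstance, getInstance, cancelInstance), and BulkExportParameters/BulkDataExportOptions collapse into the single BulkExportJobParameters model. A minimal sketch of the new start-and-poll flow, using only calls that appear in this diff (myJobCoordinator and mySrd are the wiring these tests already assume):

// Sketch only: start a bulk export through the batch2 coordinator, then poll its status.
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
startRequest.setParameters(new BulkExportJobParameters());
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
StatusEnum status = myJobCoordinator.getInstance(startResponse.getInstanceId()).getStatus();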
|
||||
|
|
|
@ -1,22 +1,44 @@
|
|||
package ca.uhn.fhir.jpa.bulk;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.interceptor.api.Hook;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
|
||||
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.jpa.util.BulkExportUtils;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.io.LineIterator;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.Basic;
|
||||
import org.hl7.fhir.r4.model.Binary;
|
||||
import org.hl7.fhir.r4.model.CarePlan;
|
||||
import org.hl7.fhir.r4.model.Device;
|
||||
import org.hl7.fhir.r4.model.DocumentReference;
|
||||
import org.hl7.fhir.r4.model.Encounter;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.Group;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.Location;
|
||||
import org.hl7.fhir.r4.model.MedicationAdministration;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Practitioner;
|
||||
import org.hl7.fhir.r4.model.Provenance;
|
||||
import org.hl7.fhir.r4.model.QuestionnaireResponse;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.hl7.fhir.r4.model.ServiceRequest;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.MethodOrderer;
|
||||
|
@ -58,7 +80,7 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportTest.class);
|
||||
|
||||
@Autowired
|
||||
private IBatch2JobRunner myJobRunner;
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
|
||||
@AfterEach
|
||||
void afterEach() {
|
||||
|
@ -94,11 +116,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setGroupId(new IdType("Group", "G"));
|
||||
options.setGroupId("Group/G");
|
||||
options.setFilters(Sets.newHashSet("Patient?gender=female"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, Collections.singletonList("Patient/PF"), Collections.singletonList("Patient/PM"));
|
||||
}
|
||||
|
@ -131,11 +153,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setGroupId(new IdType("Group", "G"));
|
||||
options.setGroupId("Group/G");
|
||||
options.setFilters(Sets.newHashSet("Patient?_security=http://security|val1"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, Collections.singletonList("Patient/PM"), Collections.singletonList("Patient/PF"));
|
||||
}
|
||||
|
@ -170,11 +192,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setGroupId(new IdType("Group", "G2"));
|
||||
options.setGroupId("Group/G2");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/PING1", "Patient/PING2"), Collections.singletonList("Patient/PNING3"));
|
||||
}
|
||||
|
@ -195,11 +217,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setGroupId(new IdType("Group", "G2"));
|
||||
options.setGroupId("Group/G2");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
|
@ -255,11 +277,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
String obsId3 = myClient.create().resource(observation).execute().getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
|
||||
options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1")));
|
||||
options.setPatientIds(Set.of("Patient/P1"));
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", obsId, encId), List.of("Patient/P2", obsId2, encId2, obsId3));
|
||||
|
@ -317,11 +339,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
String encId3 = myClient.create().resource(encounter).execute().getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Encounter"));
|
||||
options.setPatientIds(Sets.newHashSet(new IdType("Patient", "P1"), new IdType("Patient", "P2")));
|
||||
options.setPatientIds(Set.of("Patient/P1", "Patient/P2"));
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", obsId, encId, "Patient/P2", obsId2, encId2), List.of("Patient/P3", obsId3, encId3));
|
||||
|
@ -399,11 +421,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Encounter"));
|
||||
options.setGroupId(new IdType("Group", "G1"));
|
||||
options.setGroupId("Group/G1");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", practId, orgId, encId, encId2, locId), List.of("Patient/P2", orgId2, encId3, locId2));
|
||||
}
|
||||
|
@ -440,11 +462,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Encounter", "Observation"));
|
||||
options.setGroupId(new IdType("Group", "G1"));
|
||||
options.setGroupId("Group/G1");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", practId, encId, obsId), Collections.emptyList());
|
||||
}
|
||||
|
@ -497,11 +519,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(group).execute();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Provenance"));
|
||||
options.setGroupId(new IdType("Group", "G1"));
|
||||
options.setGroupId("Group/G1");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", obsId, provId, devId, devId2), List.of("Patient/P2", provId2, devId3));
|
||||
}
|
||||
|
@ -535,11 +557,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
myClient.update().resource(observation).execute();
|
||||
|
||||
// Set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "Group"));
|
||||
options.setGroupId(new IdType("Group", "B"));
|
||||
options.setGroupId("Group/B");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setSince(timeDate);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
// Should get the sub-resource (Observation) even the patient hasn't been updated after the _since param
|
||||
|
@ -575,9 +597,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
String questRespAuthId = myClient.create().resource(questionnaireResponseAuth).execute().getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Basic", "DocumentReference", "QuestionnaireResponse"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", basicId, docRefId, questRespAuthId, questRespSubId), Collections.emptyList());
|
||||
}
|
||||
|
@ -623,9 +645,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
String obsPerId = myClient.create().resource(observationPer).execute().getId().toUnqualifiedVersionless().getValue();
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Observation", "CarePlan", "MedicationAdministration", "ServiceRequest"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", carePlanId, medAdminId, sevReqId, obsSubId, obsPerId), Collections.emptyList());
|
||||
}
|
||||
|
@ -646,9 +668,9 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
|
||||
|
||||
// set the export options
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient", "Device"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
verifyBulkExportResults(options, List.of("Patient/P1", deviceId), Collections.emptyList());
|
||||
}
|
||||
|
@ -675,11 +697,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
device.setPatient(patientReference);
|
||||
final IIdType deviceIdType = myClient.create().resource(device).execute().getId().toUnqualifiedVersionless();
|
||||
|
||||
final BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
final BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(resourceTypesForExport);
|
||||
options.setGroupId(new IdType("Group", "B"));
|
||||
options.setGroupId("Group/B");
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
final List<String> expectedContainedIds;
|
||||
|
@ -700,18 +722,57 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
expectedIds.add(createObservation(withStatus("final")).getValue());
|
||||
}
|
||||
|
||||
final BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
final BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Set.of("Patient", "Observation"));
|
||||
options.setFilters(Set.of("Patient?active=true", "Patient?active=false", "Observation?status=final"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
JobInstance finalJobInstance = verifyBulkExportResults(options, expectedIds, List.of());
|
||||
assertEquals(40, finalJobInstance.getCombinedRecordsProcessed());
|
||||
}
|
||||
|
||||
private JobInstance verifyBulkExportResults(BulkDataExportOptions theOptions, List<String> theContainedList, List<String> theExcludedList) {
|
||||
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
|
||||
@Test
|
||||
public void testSystemBulkExport_WithBulkExportInclusionInterceptor() {
|
||||
|
||||
class BoysOnlyInterceptor {
|
||||
|
||||
@Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
|
||||
public boolean include(IBaseResource theResource) {
|
||||
if (((Patient)theResource).getGender() == Enumerations.AdministrativeGender.FEMALE) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
myInterceptorRegistry.registerInterceptor(new BoysOnlyInterceptor());
|
||||
try {
|
||||
|
||||
List<String> expectedIds = new ArrayList<>();
|
||||
for (int i = 0; i < 10; i++) {
|
||||
expectedIds.add(createPatient(withActiveTrue(), withGender("male")).getValue());
|
||||
}
|
||||
for (int i = 0; i < 10; i++) {
|
||||
createPatient(withActiveTrue(), withGender("female"));
|
||||
}
|
||||
|
||||
final BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Set.of("Patient", "Observation"));
|
||||
options.setFilters(Set.of("Patient?active=true", "Patient?active=false", "Observation?status=final"));
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
JobInstance finalJobInstance = verifyBulkExportResults(options, expectedIds, List.of());
|
||||
assertEquals(10, finalJobInstance.getCombinedRecordsProcessed());
|
||||
|
||||
} finally {
|
||||
myInterceptorRegistry.unregisterInterceptorsIf(t->t instanceof BoysOnlyInterceptor);
|
||||
}
|
||||
}
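
The BoysOnlyInterceptor above exercises the filtering side of the new STORAGE_BULK_EXPORT_RESOURCE_INCLUSION pointcut; the pointcut also lets a hook modify the copy of each resource before it is written to the export file. A hypothetical sketch of such a hook follows (the masking logic is illustrative only and not part of this commit):

// Sketch only: blank out patient names in the exported copy, then include the resource.
class MaskNamesInterceptor {
	@Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
	public boolean include(IBaseResource theResource) {
		if (theResource instanceof Patient) {
			((Patient) theResource).getName().clear(); // affects only the export copy
		}
		return true; // true = keep the (possibly modified) resource in the export
	}
}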
|
||||
|
||||
private JobInstance verifyBulkExportResults(BulkExportJobParameters theOptions, List<String> theContainedList, List<String> theExcludedList) {
|
||||
Batch2JobStartResponse startResponse = startNewJob(theOptions);
|
||||
|
||||
assertNotNull(startResponse);
|
||||
assertFalse(startResponse.isUsesCachedResult());
|
||||
|
@ -721,14 +782,14 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
|
||||
await()
|
||||
.atMost(200, TimeUnit.SECONDS)
|
||||
.until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getStatus() == BulkExportJobStatusEnum.COMPLETE);
|
||||
.until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getStatus() == StatusEnum.COMPLETED);
|
||||
|
||||
await()
|
||||
.atMost(200, TimeUnit.SECONDS)
|
||||
.until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
|
||||
.until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
|
||||
|
||||
// Iterate over the files
|
||||
String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
|
||||
String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
|
||||
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
|
||||
|
||||
Set<String> foundIds = new HashSet<>();
|
||||
|
@ -777,11 +838,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
@Test
|
||||
public void testValidateParameters_InvalidPostFetch_NoParams() {
|
||||
// Setup
|
||||
final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("foo");
|
||||
final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("foo");
|
||||
|
||||
// Test
|
||||
try {
|
||||
myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
startNewJob(options);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
|
||||
|
@ -796,11 +857,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
@Test
|
||||
public void testValidateParameters_InvalidPostFetch_NoParamsAfterQuestionMark() {
|
||||
// Setup
|
||||
final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Patient?");
|
||||
final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Patient?");
|
||||
|
||||
// Test
|
||||
try {
|
||||
myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
startNewJob(options);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
|
||||
|
@ -815,11 +876,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
@Test
|
||||
public void testValidateParameters_InvalidPostFetch_InvalidResourceType() {
|
||||
// Setup
|
||||
final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Foo?active=true");
|
||||
final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Foo?active=true");
|
||||
|
||||
// Test
|
||||
try {
|
||||
myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
startNewJob(options);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
|
||||
|
@ -834,11 +895,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
@Test
|
||||
public void testValidateParameters_InvalidPostFetch_UnsupportedParam() {
|
||||
// Setup
|
||||
final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Observation?subject.identifier=blah");
|
||||
final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Observation?subject.identifier=blah");
|
||||
|
||||
// Test
|
||||
try {
|
||||
myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
startNewJob(options);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
|
||||
|
@ -853,11 +914,11 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
@Test
|
||||
public void testValidateParameters_InvalidPostFetch_UnknownParam() {
|
||||
// Setup
|
||||
final BulkDataExportOptions options = createOptionsWithPostFetchFilterUrl("Observation?foo=blah");
|
||||
final BulkExportJobParameters options = createOptionsWithPostFetchFilterUrl("Observation?foo=blah");
|
||||
|
||||
// Test
|
||||
try {
|
||||
myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
startNewJob(options);
|
||||
fail();
|
||||
} catch (InvalidRequestException e) {
|
||||
|
||||
|
@ -868,12 +929,20 @@ public class BulkDataExportTest extends BaseResourceProviderR4Test {
|
|||
}
|
||||
}
|
||||
|
||||
private Batch2JobStartResponse startNewJob(BulkExportJobParameters theParameters) {
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
startRequest.setUseCache(false);
|
||||
startRequest.setParameters(theParameters);
|
||||
return myJobCoordinator.startInstance(mySrd, startRequest);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private static BulkDataExportOptions createOptionsWithPostFetchFilterUrl(String postFetchUrl) {
|
||||
final BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
private static BulkExportJobParameters createOptionsWithPostFetchFilterUrl(String postFetchUrl) {
|
||||
final BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Set.of("Patient"));
|
||||
options.setFilters(new HashSet<>());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
options.setPostFetchFilterUrls(Set.of(postFetchUrl));
|
||||
return options;
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
package ca.uhn.fhir.jpa.bulk;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
|
||||
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
|
@ -16,13 +17,13 @@ import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
|||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.BulkExportUtils;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import ca.uhn.fhir.util.BundleBuilder;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
|
@ -44,7 +45,6 @@ import org.hl7.fhir.r4.model.Extension;
|
|||
import org.hl7.fhir.r4.model.Group;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.InstantType;
|
||||
import org.hl7.fhir.r4.model.Meta;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Parameters;
|
||||
|
@ -96,7 +96,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
private static final Logger ourLog = LoggerFactory.getLogger(BulkExportUseCaseTest.class);
|
||||
|
||||
@Autowired
|
||||
private IBatch2JobRunner myJobRunner;
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
|
||||
@Autowired
|
||||
private IJobPersistence myJobPersistence;
|
||||
|
@ -439,15 +439,18 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
myPatientDao.update(patient);
|
||||
}
|
||||
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Collections.singleton("Patient"));
|
||||
options.setFilters(Collections.emptySet());
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
|
||||
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
startRequest.setParameters(options);
|
||||
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
|
||||
|
||||
assertNotNull(startResponse);
|
||||
|
||||
|
@ -562,25 +565,28 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
int patientsCreated = myPatientDao.search(SearchParameterMap.newSynchronous(), details).size();
|
||||
assertEquals(numPatients, patientsCreated);
|
||||
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
Batch2JobStartResponse job = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
startRequest.setParameters(options);
|
||||
Batch2JobStartResponse job = myJobCoordinator.startInstance(mySrd, startRequest);
|
||||
myBatch2JobHelper.awaitJobCompletion(job.getInstanceId(), 60);
|
||||
ourLog.debug("Job status after awaiting - {}", myJobRunner.getJobInfo(job.getInstanceId()).getStatus());
|
||||
ourLog.debug("Job status after awaiting - {}", myJobCoordinator.getInstance(job.getInstanceId()).getStatus());
|
||||
await()
|
||||
.atMost(300, TimeUnit.SECONDS)
|
||||
.until(() -> {
|
||||
BulkExportJobStatusEnum status = myJobRunner.getJobInfo(job.getInstanceId()).getStatus();
|
||||
if (!BulkExportJobStatusEnum.COMPLETE.equals(status)) {
|
||||
StatusEnum status = myJobCoordinator.getInstance(job.getInstanceId()).getStatus();
|
||||
if (!StatusEnum.COMPLETED.equals(status)) {
|
||||
fail("Job status was changed from COMPLETE to " + status);
|
||||
}
|
||||
return myJobRunner.getJobInfo(job.getInstanceId()).getReport() != null;
|
||||
return myJobCoordinator.getInstance(job.getInstanceId()).getReport() != null;
|
||||
});
|
||||
|
||||
String report = myJobRunner.getJobInfo(job.getInstanceId()).getReport();
|
||||
String report = myJobCoordinator.getInstance(job.getInstanceId()).getReport();
|
||||
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
|
||||
List<String> binaryUrls = results.getResourceTypeToBinaryIds().get("Patient");
|
||||
|
||||
|
@ -1364,15 +1370,15 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
}
|
||||
|
||||
BulkExportJobResults startGroupBulkExportJobAndAwaitCompletion(HashSet<String> theResourceTypes, HashSet<String> theFilters, String theGroupId) {
|
||||
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.GROUP, theResourceTypes, theFilters, theGroupId);
|
||||
return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.GROUP, theResourceTypes, theFilters, theGroupId);
|
||||
}
|
||||
|
||||
BulkExportJobResults startSystemBulkExportJobAndAwaitCompletion(Set<String> theResourceTypes, Set<String> theFilters) {
|
||||
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
|
||||
return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
|
||||
}
|
||||
|
||||
BulkExportJobResults startBulkExportJobAndAwaitCompletion(
|
||||
BulkDataExportOptions.ExportStyle theExportStyle,
|
||||
BulkExportJobParameters.ExportStyle theExportStyle,
|
||||
Set<String> theResourceTypes,
|
||||
Set<String> theFilters,
|
||||
String theGroupOrPatientId
|
||||
|
@ -1398,7 +1404,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
|
||||
|
||||
MethodOutcome outcome;
|
||||
if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
|
||||
if (theExportStyle == BulkExportJobParameters.ExportStyle.GROUP) {
|
||||
|
||||
outcome = myClient
|
||||
.operation()
|
||||
|
@ -1408,7 +1414,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
.returnMethodOutcome()
|
||||
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
|
||||
.execute();
|
||||
} else if (theExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && theGroupOrPatientId != null) {
|
||||
} else if (theExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && theGroupOrPatientId != null) {
|
||||
//TODO add support for this actual processor.
|
||||
fail("Bulk Exports that return no data do not return");
|
||||
outcome = myClient
|
||||
|
@ -1453,32 +1459,35 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
|
||||
myBatch2JobHelper.awaitJobCompletion(jobInstanceId, 60);
|
||||
|
||||
await().atMost(300, TimeUnit.SECONDS).until(() -> myJobRunner.getJobInfo(jobInstanceId).getReport() != null);
|
||||
await().atMost(300, TimeUnit.SECONDS).until(() -> myJobCoordinator.getInstance(jobInstanceId).getReport() != null);
|
||||
|
||||
String report = myJobRunner.getJobInfo(jobInstanceId).getReport();
|
||||
String report = myJobCoordinator.getInstance(jobInstanceId).getReport();
|
||||
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void verifyBulkExportResults(String theGroupId, HashSet<String> theFilters, List<String> theContainedList, List<String> theExcludedList) {
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Patient"));
|
||||
options.setGroupId(new IdType("Group", theGroupId));
|
||||
options.setGroupId("Group/" + theGroupId);
|
||||
options.setFilters(theFilters);
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
startRequest.setParameters(options);
|
||||
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
|
||||
|
||||
assertNotNull(startResponse);
|
||||
|
||||
// Run a scheduled pass to build the export
|
||||
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
|
||||
|
||||
await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
|
||||
await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
|
||||
|
||||
// Iterate over the files
|
||||
String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
|
||||
String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
|
||||
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
|
||||
for (Map.Entry<String, List<String>> file : results.getResourceTypeToBinaryIds().entrySet()) {
|
||||
List<String> binaryIds = file.getValue();
|
||||
|
@ -1500,6 +1509,6 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
|
|||
}
|
||||
|
||||
private BulkExportJobResults startPatientBulkExportJobAndAwaitResults(HashSet<String> theTypes, HashSet<String> theFilters, String thePatientId) {
|
||||
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.PATIENT, theTypes, theFilters, thePatientId);
|
||||
return startBulkExportJobAndAwaitCompletion(BulkExportJobParameters.ExportStyle.PATIENT, theTypes, theFilters, thePatientId);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,14 +3,13 @@ package ca.uhn.fhir.jpa.bulk.export;
|
|||
import ca.uhn.fhir.batch2.api.IJobDataSink;
|
||||
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.jobs.export.ExpandResourcesStep;
|
||||
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
|
||||
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
|
||||
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
|
|
|
@ -4,16 +4,13 @@ import ca.uhn.fhir.batch2.api.IJobDataSink;
|
|||
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.api.VoidModel;
|
||||
import ca.uhn.fhir.batch2.jobs.export.FetchResourceIdsStep;
|
||||
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TagsTest;
|
||||
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import org.hl7.fhir.r4.model.DateTimeType;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.OrganizationAffiliation;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
|
@ -25,7 +22,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
|
|||
import static org.mockito.Mockito.times;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
||||
public class FetchResourceIdsStepJpaTest extends BaseJpaR4Test {
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
package ca.uhn.fhir.jpa.interceptor;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
|
||||
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
||||
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
|
||||
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.jpa.util.BulkExportUtils;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
|
||||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.hamcrest.Matchers;
|
||||
|
@ -43,7 +44,7 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
|
|||
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;
|
||||
|
||||
@Autowired
|
||||
private IBatch2JobRunner myJobRunner;
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEach() {
|
||||
|
@ -198,23 +199,26 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
|
|||
|
||||
private void createBulkJobAndCheckCodingList(List<String> codingList) {
|
||||
// Create a bulk job
|
||||
BulkDataExportOptions options = new BulkDataExportOptions();
|
||||
BulkExportJobParameters options = new BulkExportJobParameters();
|
||||
options.setResourceTypes(Sets.newHashSet("Observation"));
|
||||
options.setFilters(Sets.newHashSet(TEST_OBV_FILTER));
|
||||
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
|
||||
options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
|
||||
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
|
||||
|
||||
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(mySrd, BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
startRequest.setParameters(options);
|
||||
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(mySrd, startRequest);
|
||||
|
||||
assertNotNull(startResponse);
|
||||
|
||||
// Run a scheduled pass to build the export
|
||||
myBatch2JobHelper.awaitJobCompletion(startResponse.getInstanceId());
|
||||
|
||||
await().until(() -> myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport() != null);
|
||||
await().until(() -> myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport() != null);
|
||||
|
||||
// Iterate over the files
|
||||
String report = myJobRunner.getJobInfo(startResponse.getInstanceId()).getReport();
|
||||
String report = myJobCoordinator.getInstance(startResponse.getInstanceId()).getReport();
|
||||
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
|
||||
for (Map.Entry<String, List<String>> file : results.getResourceTypeToBinaryIds().entrySet()) {
|
||||
String resourceTypeInFile = file.getKey();
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
|
||||
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
|
||||
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
|
||||
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
|
|
|
@@ -1,15 +1,15 @@
 package ca.uhn.fhir.jpa.provider.r4;

+import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.model.JobInstance;
+import ca.uhn.fhir.batch2.model.StatusEnum;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
 import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
-import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.entity.PartitionEntity;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -18,7 +18,7 @@ import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import ca.uhn.fhir.rest.server.RestfulServer;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
@@ -621,7 +621,7 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
 	private BulkDataExportProvider myProvider;

 	@Mock
-	private IBatch2JobRunner myJobRunner;
+	private IJobCoordinator myJobCoordinator;

 	@Spy
 	private RequestPartitionHelperSvc myRequestPartitionHelperSvc = new MultitenantServerR4Test.PartitionTesting.MyRequestPartitionHelperSvc();

@@ -661,21 +661,22 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
 		map.put("Patient", Arrays.asList("Binary/1", "Binary/2"));
 		results.setResourceTypeToBinaryIds(map);

-		Batch2JobInfo jobInfo = new Batch2JobInfo();
-		jobInfo.setJobId(jobId);
-		jobInfo.setStatus(BulkExportJobStatusEnum.COMPLETE);
+		JobInstance jobInfo = new JobInstance();
+		jobInfo.setInstanceId(jobId);
+		jobInfo.setStatus(StatusEnum.COMPLETED);
 		jobInfo.setReport(JsonUtil.serialize(results));
+		jobInfo.setParameters(new BulkExportJobParameters());

 		// Create a bulk job
-		BulkDataExportOptions options = new BulkDataExportOptions();
+		BulkExportJobParameters options = new BulkExportJobParameters();
 		options.setResourceTypes(Sets.newHashSet("Patient"));
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);

 		Batch2JobStartResponse startResponse = new Batch2JobStartResponse();
 		startResponse.setInstanceId(jobId);
-		when(myJobRunner.startNewJob(isNotNull(), any()))
+		when(myJobCoordinator.startInstance(isNotNull(), any()))
 			.thenReturn(startResponse);
-		when(myJobRunner.getJobInfo(anyString()))
+		when(myJobCoordinator.getInstance(anyString()))
 			.thenReturn(jobInfo);

 		// mocking
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.provider.r5;
 import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
 import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
 import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
 import ca.uhn.fhir.jpa.provider.DiffProvider;
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.provider;
 import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeProvider;
 import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
 import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.search;

 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
@@ -37,7 +37,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
 import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
 import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
 import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -8,7 +8,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
-import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
+import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
 import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
 import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
 import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1,145 +0,0 @@
-/*-
- * #%L
- * HAPI FHIR - Server Framework
- * %%
- * Copyright (C) 2014 - 2023 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.rest.api.server.bulk;
-
-import org.hl7.fhir.instance.model.api.IIdType;
-
-import javax.annotation.Nonnull;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Set;
-
-// TODO: JA in next ticket - We have 3 more or less identical classes representing
-// bulk data job parameters: BulkDataExportOptions, BulkExportJobParameters, and BulkExportParameters
-// They don't seem to serve any distinct purpose so they should be collapsed into 1
-public class BulkDataExportOptions {
-
-	public enum ExportStyle {
-		PATIENT,
-		GROUP,
-		SYSTEM
-	}
-
-	private String myOutputFormat;
-	private Set<String> myResourceTypes;
-	private Date mySince;
-	private Set<String> myFilters;
-	private Set<String> myPostFetchFilterUrls;
-	private ExportStyle myExportStyle;
-	private boolean myExpandMdm;
-	private IIdType myGroupId;
-	private Set<IIdType> myPatientIds;
-
-	private String myExportIdentifier;
-
-	public void setOutputFormat(String theOutputFormat) {
-		myOutputFormat = theOutputFormat;
-	}
-
-	public void setResourceTypes(Set<String> theResourceTypes) {
-		myResourceTypes = theResourceTypes;
-	}
-
-	public void setSince(Date theSince) {
-		mySince = theSince;
-	}
-
-	public void setFilters(Set<String> theFilters) {
-		myFilters = theFilters;
-	}
-
-	public ExportStyle getExportStyle() {
-		return myExportStyle;
-	}
-
-	public void setExportStyle(ExportStyle theExportStyle) {
-		myExportStyle = theExportStyle;
-	}
-
-	public String getOutputFormat() {
-		return myOutputFormat;
-	}
-
-	@Nonnull
-	public Set<String> getResourceTypes() {
-		if (myResourceTypes == null) {
-			myResourceTypes = Set.of();
-		}
-		return myResourceTypes;
-	}
-
-	public Date getSince() {
-		return mySince;
-	}
-
-	@Nonnull
-	public Set<String> getFilters() {
-		if (myFilters == null) {
-			myFilters = Set.of();
-		}
-		return myFilters;
-	}
-
-	@Nonnull
-	public Set<String> getPostFetchFilterUrls() {
-		if (myPostFetchFilterUrls == null) {
-			myPostFetchFilterUrls = Set.of();
-		}
-		return myPostFetchFilterUrls;
-	}
-
-	public void setPostFetchFilterUrls(Set<String> thePostFetchFilterUrls) {
-		myPostFetchFilterUrls = thePostFetchFilterUrls;
-	}
-
-	public boolean isExpandMdm() {
-		return myExpandMdm;
-	}
-
-	public void setExpandMdm(boolean theExpandMdm) {
-		myExpandMdm = theExpandMdm;
-	}
-
-	public IIdType getGroupId() {
-		return myGroupId;
-	}
-
-	public void setGroupId(IIdType theGroupId) {
-		myGroupId = theGroupId;
-	}
-
-	public Set<IIdType> getPatientIds() {
-		return myPatientIds;
-	}
-
-	public void setPatientIds(Set<IIdType> thePatientIds) {
-		myPatientIds = thePatientIds;
-	}
-
-	public String getExportIdentifier() {
-		return myExportIdentifier;
-	}
-
-	public void setExportIdentifier(String theExportIdentifier) {
-		myExportIdentifier = theExportIdentifier;
-	}
-}
@@ -1,6 +1,6 @@
 /*-
  * #%L
- * hapi-fhir-storage-batch2-jobs
+ * HAPI FHIR - Server Framework
  * %%
  * Copyright (C) 2014 - 2023 Smile CDR, Inc.
  * %%

@@ -17,24 +17,26 @@
  * limitations under the License.
  * #L%
  */
-package ca.uhn.fhir.batch2.jobs.export.models;
+package ca.uhn.fhir.rest.api.server.bulk;

 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
-import ca.uhn.fhir.jpa.util.JsonDateDeserializer;
-import ca.uhn.fhir.jpa.util.JsonDateSerializer;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.server.util.JsonDateDeserializer;
+import ca.uhn.fhir.rest.server.util.JsonDateSerializer;
+import ca.uhn.fhir.model.api.IModelJson;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;

 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Date;
 import java.util.List;

-public class BulkExportJobParameters extends BulkExportJobBase {
+public class BulkExportJobParameters implements IModelJson {

-	// list of resource types to export
+	/**
+	 * List of resource types to export.
+	 */
 	@JsonProperty("resourceTypes")
 	private List<String> myResourceTypes;

@@ -50,22 +52,43 @@ public class BulkExportJobParameters extends BulkExportJobBase
 	@JsonProperty("exportId")
 	private String myExportId;

+	/**
+	 * Filters are used to narrow down the resources to export.
+	 * Eg:
+	 * Patient/123?group=a
+	 * "group=a" is a filter
+	 */
 	@JsonProperty("filters")
 	private List<String> myFilters;

+	/**
+	 * URLs to be applied by the inMemoryMatcher after the SQL select
+	 */
 	@JsonProperty("postFetchFilterUrls")
 	private List<String> myPostFetchFilterUrls;

+	/**
+	 * Output format.
+	 * Currently unsupported (all outputs are ndjson)
+	 */
 	@JsonProperty("outputFormat")
 	private String myOutputFormat;

-	// TODO - move enum
+	/**
+	 * Export style - Patient, Group or Everything
+	 */
 	@JsonProperty("exportStyle")
-	private BulkDataExportOptions.ExportStyle myExportStyle;
+	private ExportStyle myExportStyle;

+	/**
+	 * Patient id(s)
+	 */
 	@JsonProperty("patientIds")
 	private List<String> myPatientIds;

+	/**
+	 * The URL of the request which originated the job.
+	 */
 	@JsonProperty("originalRequestUrl")
 	private String myOriginalRequestUrl;

@@ -75,33 +98,27 @@ public class BulkExportJobParameters extends BulkExportJobBase
 	@JsonProperty("groupId")
 	private String myGroupId;

 	/**
 	 * For group export;
 	 * whether or not to expand mdm
 	 */
 	@JsonProperty("expandMdm")
 	private boolean myExpandMdm;

 	/**
 	 * The partition for the request if applicable.
 	 */
 	@JsonProperty("partitionId")
 	private RequestPartitionId myPartitionId;

-	public static BulkExportJobParameters createFromExportJobParameters(BulkExportParameters theParameters) {
-		BulkExportJobParameters params = new BulkExportJobParameters();
-		params.setResourceTypes(theParameters.getResourceTypes());
-		params.setExportStyle(theParameters.getExportStyle());
-		params.setExportIdentifier(theParameters.getExportIdentifier());
-		params.setFilters(theParameters.getFilters());
-		params.setPostFetchFilterUrls(theParameters.getPostFetchFilterUrls());
-		params.setGroupId(theParameters.getGroupId());
-		params.setOutputFormat(theParameters.getOutputFormat());
-		params.setSince(theParameters.getSince());
-		params.setExpandMdm(theParameters.isExpandMdm());
-		params.setPatientIds(theParameters.getPatientIds());
-		params.setOriginalRequestUrl(theParameters.getOriginalRequestUrl());
-		params.setPartitionId(theParameters.getPartitionId());
-		return params;
-	}
-
 	public String getExportIdentifier() {
 		return myExportId;
 	}

+	public void setExportIdentifier(String theExportId) {
+		myExportId = theExportId;
+	}
+
 	public List<String> getResourceTypes() {
 		if (myResourceTypes == null) {
 			myResourceTypes = new ArrayList<>();

@@ -109,12 +126,11 @@ public class BulkExportJobParameters extends BulkExportJobBase
 		return myResourceTypes;
 	}

-	public void setExportIdentifier(String theExportId) {
-		myExportId = theExportId;
-	}
-
-	public void setResourceTypes(List<String> theResourceTypes) {
-		myResourceTypes = theResourceTypes;
+	public void setResourceTypes(Collection<String> theResourceTypes) {
+		getResourceTypes().clear();
+		if (theResourceTypes != null) {
+			getResourceTypes().addAll(theResourceTypes);
+		}
 	}

 	public Date getSince() {

@@ -126,11 +142,17 @@ public class BulkExportJobParameters extends BulkExportJobBase
 	}

 	public List<String> getFilters() {
 		if (myFilters == null) {
 			myFilters = new ArrayList<>();
 		}
 		return myFilters;
 	}

-	public void setFilters(List<String> theFilters) {
-		myFilters = theFilters;
+	public void setFilters(Collection<String> theFilters) {
+		getFilters().clear();
+		if (theFilters != null) {
+			getFilters().addAll(theFilters);
+		}
 	}

 	public List<String> getPostFetchFilterUrls() {

@@ -140,8 +162,11 @@ public class BulkExportJobParameters extends BulkExportJobBase
 		return myPostFetchFilterUrls;
 	}

-	public void setPostFetchFilterUrls(List<String> thePostFetchFilterUrls) {
-		myPostFetchFilterUrls = thePostFetchFilterUrls;
+	public void setPostFetchFilterUrls(Collection<String> thePostFetchFilterUrls) {
+		getPostFetchFilterUrls().clear();
+		if (thePostFetchFilterUrls != null) {
+			getPostFetchFilterUrls().addAll(thePostFetchFilterUrls);
+		}
 	}

 	public String getOutputFormat() {

@@ -152,20 +177,26 @@ public class BulkExportJobParameters extends BulkExportJobBase
 		myOutputFormat = theOutputFormat;
 	}

-	public BulkDataExportOptions.ExportStyle getExportStyle() {
+	public ExportStyle getExportStyle() {
 		return myExportStyle;
 	}

-	public void setExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
+	public void setExportStyle(ExportStyle theExportStyle) {
 		myExportStyle = theExportStyle;
 	}

 	public List<String> getPatientIds() {
 		if (myPatientIds == null) {
 			myPatientIds = new ArrayList<>();
 		}
 		return myPatientIds;
 	}

-	public void setPatientIds(List<String> thePatientIds) {
-		myPatientIds = thePatientIds;
+	public void setPatientIds(Collection<String> thePatientIds) {
+		getPatientIds().clear();
+		if (thePatientIds != null) {
+			getPatientIds().addAll(thePatientIds);
+		}
 	}

 	public String getGroupId() {

@@ -188,7 +219,7 @@ public class BulkExportJobParameters extends BulkExportJobBase
 		return myOriginalRequestUrl;
 	}

-	private void setOriginalRequestUrl(String theOriginalRequestUrl) {
+	public void setOriginalRequestUrl(String theOriginalRequestUrl) {
 		this.myOriginalRequestUrl = theOriginalRequestUrl;
 	}

@@ -200,4 +231,7 @@ public class BulkExportJobParameters extends BulkExportJobBase
 		this.myPartitionId = thePartitionId;
 	}

+	public enum ExportStyle {
+		PATIENT, GROUP, SYSTEM
+	}
 }
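Note the shape of the consolidated parameters object: the Collection-typed setters copy into internally managed lists rather than keeping a reference to the caller's collection, and the list getters never return null. A minimal usage sketch against the accessors shown in this diff (the class name and filter expression are made up for illustration):

	import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
	import java.util.List;

	public class BulkExportParamsExample {
		public static void main(String[] args) {
			BulkExportJobParameters params = new BulkExportJobParameters();
			params.setResourceTypes(List.of("Patient", "Observation"));
			params.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
			params.setFilters(List.of("Observation?status=final")); // hypothetical filter value
			params.setPatientIds(null); // clears the list instead of storing null
			System.out.println(params.getPatientIds().isEmpty()); // true
			System.out.println(params.getResourceTypes()); // [Patient, Observation]
		}
	}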
@@ -28,7 +28,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
 import ca.uhn.fhir.rest.server.interceptor.consent.ConsentInterceptor;
 import com.google.common.collect.Lists;

@@ -195,6 +195,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
 		this.myAuthorizationSearchParamMatcher = theAuthorizationSearchParamMatcher;
 	}

+	@Override
 	@Nullable
 	public IAuthorizationSearchParamMatcher getSearchParamMatcher() {
 		return myAuthorizationSearchParamMatcher;

@@ -410,7 +411,7 @@ public class AuthorizationInterceptor implements IRuleApplier {
 	}

 	@Hook(Pointcut.STORAGE_INITIATE_BULK_EXPORT)
-	public void initiateBulkExport(RequestDetails theRequestDetails, BulkDataExportOptions theBulkExportOptions, Pointcut thePointcut) {
+	public void initiateBulkExport(RequestDetails theRequestDetails, BulkExportJobParameters theBulkExportOptions, Pointcut thePointcut) {
 		// RestOperationTypeEnum restOperationType = determineRestOperationTypeFromBulkExportOptions(theBulkExportOptions);
 		RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;

@@ -424,18 +425,18 @@ public class AuthorizationInterceptor implements IRuleApplier {
 	 * TODO GGG This method should eventually be used when invoking the rules applier.....however we currently rely on the incorrect
 	 * behaviour of passing down `EXTENDED_OPERATION_SERVER`.
 	 */
-	private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions(BulkDataExportOptions theBulkExportOptions) {
+	private RestOperationTypeEnum determineRestOperationTypeFromBulkExportOptions(BulkExportJobParameters theBulkExportOptions) {
 		RestOperationTypeEnum restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
-		BulkDataExportOptions.ExportStyle exportStyle = theBulkExportOptions.getExportStyle();
-		if (exportStyle.equals(BulkDataExportOptions.ExportStyle.SYSTEM)) {
+		BulkExportJobParameters.ExportStyle exportStyle = theBulkExportOptions.getExportStyle();
+		if (exportStyle.equals(BulkExportJobParameters.ExportStyle.SYSTEM)) {
 			restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
-		} else if (exportStyle.equals(BulkDataExportOptions.ExportStyle.PATIENT)) {
+		} else if (exportStyle.equals(BulkExportJobParameters.ExportStyle.PATIENT)) {
 			if (theBulkExportOptions.getPatientIds().size() == 1) {
 				restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
 			} else {
 				restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
 			}
-		} else if (exportStyle.equals(BulkDataExportOptions.ExportStyle.GROUP)) {
+		} else if (exportStyle.equals(BulkExportJobParameters.ExportStyle.GROUP)) {
 			restOperationType = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
 		}
 		return restOperationType;
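The signature change above also defines the contract for third-party interceptors on this pointcut. A hypothetical interceptor (the class name and message are invented for illustration) using the same parameter types to veto system-level exports; since the hook is broadcast before the batch job instance is created, throwing here aborts the export:

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.rest.api.server.RequestDetails;
	import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
	import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;

	@Interceptor
	public class DenySystemExportInterceptor {

		// Same parameter types as AuthorizationInterceptor.initiateBulkExport() above
		@Hook(Pointcut.STORAGE_INITIATE_BULK_EXPORT)
		public void initiateBulkExport(RequestDetails theRequestDetails, BulkExportJobParameters theParameters) {
			if (theParameters.getExportStyle() == BulkExportJobParameters.ExportStyle.SYSTEM) {
				throw new ForbiddenOperationException("System-level bulk export is not permitted");
			}
		}
	}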
@@ -23,11 +23,12 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;

 import java.util.Collection;
+import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;

@@ -40,7 +41,7 @@ public class RuleBulkExportImpl extends BaseRule {
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RuleBulkExportImpl.class);
 	private String myGroupId;
 	private String myPatientId;
-	private BulkDataExportOptions.ExportStyle myWantExportStyle;
+	private BulkExportJobParameters.ExportStyle myWantExportStyle;
 	private Collection<String> myResourceTypes;
 	private boolean myWantAnyStyle;

@@ -58,7 +59,7 @@ public class RuleBulkExportImpl extends BaseRule {
 			return null;
 		}

-		BulkDataExportOptions options = (BulkDataExportOptions) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS);
+		BulkExportJobParameters options = (BulkExportJobParameters) theRequestDetails.getAttribute(AuthorizationInterceptor.REQUEST_ATTRIBUTE_BULK_DATA_EXPORT_OPTIONS);

 		if (!myWantAnyStyle && options.getExportStyle() != myWantExportStyle) {
 			return null;

@@ -75,13 +76,13 @@ public class RuleBulkExportImpl extends BaseRule {
 			}
 		}

-		if (myWantAnyStyle || myWantExportStyle == BulkDataExportOptions.ExportStyle.SYSTEM) {
+		if (myWantAnyStyle || myWantExportStyle == BulkExportJobParameters.ExportStyle.SYSTEM) {
 			return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);
 		}

 		if (isNotBlank(myGroupId) && options.getGroupId() != null) {
 			String expectedGroupId = new IdDt(myGroupId).toUnqualifiedVersionless().getValue();
-			String actualGroupId = options.getGroupId().toUnqualifiedVersionless().getValue();
+			String actualGroupId = new IdDt(options.getGroupId()).toUnqualifiedVersionless().getValue();
 			if (Objects.equals(expectedGroupId, actualGroupId)) {
 				return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);
 			}

@@ -90,12 +91,12 @@ public class RuleBulkExportImpl extends BaseRule {
 		// TODO This is a _bad bad bad implementation_ but we are out of time.
 		// 1. If a claimed resource ID is present in the parameters, and the permission contains one, check for membership
 		// 2. If not a member, Deny.
-		if (myWantExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && isNotBlank(myPatientId)) {
+		if (myWantExportStyle == BulkExportJobParameters.ExportStyle.PATIENT && isNotBlank(myPatientId)) {
 			final String expectedPatientId = new IdDt(myPatientId).toUnqualifiedVersionless().getValue();
-			if (options.getPatientIds() != null) {
+			if (!options.getPatientIds().isEmpty()) {
 				ourLog.debug("options.getPatientIds() != null");
 				final String actualPatientIds = options.getPatientIds().stream()
-					.map(t -> t.toUnqualifiedVersionless().getValue())
+					.map(t -> new IdDt(t).toUnqualifiedVersionless().getValue())
 					.collect(Collectors.joining(","));
 				if (actualPatientIds.contains(expectedPatientId)) {
 					return newVerdict(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, theRuleApplier);

@@ -104,7 +105,7 @@ public class RuleBulkExportImpl extends BaseRule {
 				return new AuthorizationInterceptor.Verdict(PolicyEnum.DENY, this);
 			}

-			final Set<String> filters = options.getFilters();
+			final List<String> filters = options.getFilters();

 			// TODO: LD: This admittedly adds more to the tech debt above, and should really be addressed by https://github.com/hapifhir/hapi-fhir/issues/4990
 			if (! filters.isEmpty()) {

@@ -126,22 +127,22 @@ public class RuleBulkExportImpl extends BaseRule {
 	}

 	public void setAppliesToGroupExportOnGroup(String theGroupId) {
-		myWantExportStyle = BulkDataExportOptions.ExportStyle.GROUP;
+		myWantExportStyle = BulkExportJobParameters.ExportStyle.GROUP;
 		myGroupId = theGroupId;
 	}

 	public void setAppliesToPatientExportOnGroup(String theGroupId) {
-		myWantExportStyle = BulkDataExportOptions.ExportStyle.PATIENT;
+		myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT;
 		myGroupId = theGroupId;
 	}

 	public void setAppliesToPatientExport(String thePatientId) {
-		myWantExportStyle = BulkDataExportOptions.ExportStyle.PATIENT;
+		myWantExportStyle = BulkExportJobParameters.ExportStyle.PATIENT;
 		myPatientId = thePatientId;
 	}

 	public void setAppliesToSystem() {
-		myWantExportStyle = BulkDataExportOptions.ExportStyle.SYSTEM;
+		myWantExportStyle = BulkExportJobParameters.ExportStyle.SYSTEM;
 	}

 	public void setResourceTypes(Collection<String> theResourceTypes) {

@@ -156,7 +157,7 @@ public class RuleBulkExportImpl extends BaseRule {
 		return myGroupId;
 	}

-	BulkDataExportOptions.ExportStyle getWantExportStyle() {
+	BulkExportJobParameters.ExportStyle getWantExportStyle() {
 		return myWantExportStyle;
 	}
 }
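Because getGroupId() and getPatientIds() now return plain Strings rather than IIdType, both sides of each comparison above are pushed through IdDt so that qualified or versioned IDs still match. A small illustration (the fully-qualified URL is a made-up example):

	import ca.uhn.fhir.model.primitive.IdDt;
	import java.util.Objects;

	public class IdNormalizationExample {
		public static void main(String[] args) {
			String expected = new IdDt("Group/1").toUnqualifiedVersionless().getValue();
			// A qualified, versioned reference to the same logical resource:
			String actual = new IdDt("http://example.org/fhir/Group/1/_history/2")
				.toUnqualifiedVersionless().getValue();
			// Both normalize to "Group/1"
			System.out.println(Objects.equals(expected, actual)); // true
		}
	}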
@@ -1,6 +1,6 @@
 /*-
  * #%L
- * HAPI FHIR Storage api
+ * HAPI FHIR - Server Framework
  * %%
  * Copyright (C) 2014 - 2023 Smile CDR, Inc.
  * %%

@@ -17,12 +17,12 @@
  * limitations under the License.
  * #L%
  */
-package ca.uhn.fhir.jpa.util;
+package ca.uhn.fhir.rest.server.util;

+import ca.uhn.fhir.model.primitive.DateTimeDt;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
-import org.hl7.fhir.dstu3.model.DateTimeType;

 import java.io.IOException;
 import java.util.Date;

@@ -35,7 +35,7 @@ public class JsonDateDeserializer extends JsonDeserializer<Date> {
 	public Date deserialize(JsonParser theParser, DeserializationContext theDeserializationContext) throws IOException {
 		String string = theParser.getValueAsString();
 		if (isNotBlank(string)) {
-			return new DateTimeType(string).getValue();
+			return new DateTimeDt(string).getValue();
 		}
 		return null;
 	}
@@ -1,6 +1,6 @@
 /*-
  * #%L
- * HAPI FHIR Storage api
+ * HAPI FHIR - Server Framework
  * %%
  * Copyright (C) 2014 - 2023 Smile CDR, Inc.
  * %%

@@ -17,12 +17,13 @@
  * limitations under the License.
  * #L%
  */
-package ca.uhn.fhir.jpa.util;
+package ca.uhn.fhir.rest.server.util;

+import ca.uhn.fhir.model.primitive.DateTimeDt;
+import ca.uhn.fhir.model.primitive.InstantDt;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.JsonSerializer;
 import com.fasterxml.jackson.databind.SerializerProvider;
-import org.hl7.fhir.dstu3.model.InstantType;

 import java.io.IOException;
 import java.util.Date;

@@ -32,7 +33,7 @@ public class JsonDateSerializer extends JsonSerializer<Date> {
 	@Override
 	public void serialize(Date theValue, JsonGenerator theGen, SerializerProvider theSerializers) throws IOException {
 		if (theValue != null) {
-			theGen.writeString(new InstantType(theValue).getValueAsString());
+			theGen.writeString(new InstantDt(theValue).getValueAsString());
 		}
 	}
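Swapping the dstu3 structures for the version-independent DateTimeDt/InstantDt primitives is what lets these two helpers move out of the JPA module into the server framework. A round-trip sketch, under the assumption that the serializer writes exactly what the deserializer later reads:

	import ca.uhn.fhir.model.primitive.DateTimeDt;
	import ca.uhn.fhir.model.primitive.InstantDt;
	import java.util.Date;

	public class JsonDateRoundTrip {
		public static void main(String[] args) {
			Date now = new Date();
			// Serializer side: millisecond-precision instant, e.g. "2023-07-05T10:15:30.123-04:00"
			String wire = new InstantDt(now).getValueAsString();
			// Deserializer side: parse the wire string back into a java.util.Date
			Date parsed = new DateTimeDt(wire).getValue();
			System.out.println(parsed.getTime() == now.getTime()); // true
		}
	}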
@@ -1,10 +1,9 @@
 package ca.uhn.fhir.rest.server.interceptor.auth;

 import ca.uhn.fhir.interceptor.api.Pointcut;
-import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;

@@ -41,7 +40,7 @@ public class RuleBulkExportImplTest {
 		Set<String> myWantTypes = new HashSet<>();
 		myWantTypes.add("Questionnaire");

-		BulkDataExportOptions options = new BulkDataExportOptions();
+		BulkExportJobParameters options = new BulkExportJobParameters();
 		options.setResourceTypes(myWantTypes);

 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -63,7 +62,7 @@ public class RuleBulkExportImplTest {
 		myWantTypes.add("Patient");
 		myWantTypes.add("Practitioner");

-		BulkDataExportOptions options = new BulkDataExportOptions();
+		BulkExportJobParameters options = new BulkExportJobParameters();
 		options.setResourceTypes(myWantTypes);

 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -78,9 +77,9 @@ public class RuleBulkExportImplTest {
 		myRule.setAppliesToGroupExportOnGroup("invalid group");
 		myRule.setMode(PolicyEnum.ALLOW);

-		BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
-		options.setGroupId(new IdDt("Group/123"));
+		BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
+		options.setGroupId("Group/123");

 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -94,9 +93,9 @@ public class RuleBulkExportImplTest {
 		myRule.setAppliesToGroupExportOnGroup("Group/1");
 		myRule.setMode(PolicyEnum.ALLOW);

-		BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
-		options.setGroupId(new IdDt("Group/1"));
+		BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
+		options.setGroupId("Group/1");

 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -110,9 +109,9 @@ public class RuleBulkExportImplTest {
 		RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
-		options.setPatientIds(Set.of(new IdDt("Patient/123")));
+		BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
+		options.setPatientIds(Set.of("Patient/123"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

 		//When

@@ -128,9 +127,9 @@ public class RuleBulkExportImplTest {
 		RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
-		options.setPatientIds(Set.of(new IdDt("Patient/456")));
+		BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
+		options.setPatientIds(Set.of("Patient/456"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

 		//When

@@ -146,8 +145,8 @@ public class RuleBulkExportImplTest {
 		RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

 		//When

@@ -163,8 +162,8 @@ public class RuleBulkExportImplTest {
 		final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		final BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		final BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		options.setFilters(Set.of("Patient?_id=123"));
 		options.setResourceTypes(Set.of("Patient"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -182,8 +181,8 @@ public class RuleBulkExportImplTest {
 		final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		final BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		final BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		options.setFilters(Set.of("Patient?_id=123"));
 		options.setResourceTypes(Set.of("Patient", "Condition", "Immunization"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -201,8 +200,8 @@ public class RuleBulkExportImplTest {
 		final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		final BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		final BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		options.setFilters(Set.of("Patient?_id=123"));
 		options.setResourceTypes(Set.of("Observation"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -220,8 +219,8 @@ public class RuleBulkExportImplTest {
 		final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		final BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		final BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		options.setFilters(Set.of("Patient?_id=123"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);

@@ -238,8 +237,8 @@ public class RuleBulkExportImplTest {
 		final RuleBulkExportImpl myRule = new RuleBulkExportImpl("b");
 		myRule.setAppliesToPatientExport("Patient/123");
 		myRule.setMode(PolicyEnum.ALLOW);
-		final BulkDataExportOptions options = new BulkDataExportOptions();
-		options.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
+		final BulkExportJobParameters options = new BulkExportJobParameters();
+		options.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
 		options.setFilters(Set.of("Patient?_id=456"));
 		options.setResourceTypes(Set.of("Patient"));
 		when(myRequestDetails.getAttribute(any())).thenReturn(options);
@@ -7,7 +7,7 @@
 	<parent>
 		<artifactId>hapi-fhir-serviceloaders</artifactId>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -7,7 +7,7 @@
 	<parent>
 		<artifactId>hapi-fhir-serviceloaders</artifactId>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -21,7 +21,7 @@
 		<dependency>
 			<groupId>ca.uhn.hapi.fhir</groupId>
 			<artifactId>hapi-fhir-caching-api</artifactId>
-			<version>6.7.10-SNAPSHOT</version>
+			<version>6.7.11-SNAPSHOT</version>

 		</dependency>
 		<dependency>

@@ -7,7 +7,7 @@
 	<parent>
 		<artifactId>hapi-fhir-serviceloaders</artifactId>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -7,7 +7,7 @@
 	<parent>
 		<artifactId>hapi-fhir</artifactId>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<artifactId>hapi-deployable-pom</artifactId>
 		<groupId>ca.uhn.hapi.fhir</groupId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>
 	</parent>

 	<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot-samples</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-spring-boot</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.10-SNAPSHOT</version>
+		<version>6.7.11-SNAPSHOT</version>

 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -19,11 +19,7 @@
  */
 package ca.uhn.fhir.batch2.jobs.config;

-import ca.uhn.fhir.batch2.api.IJobCoordinator;
 import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
-import ca.uhn.fhir.batch2.jobs.services.Batch2JobRunnerImpl;
-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
 import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import org.springframework.context.annotation.Bean;

@@ -34,8 +30,4 @@ public class BatchCommonCtx {
 		return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc);
 	}

-	@Bean
-	public IBatch2JobRunner batch2JobRunner(IJobCoordinator theJobCoordinator, FhirContext theFhirContext) {
-		return new Batch2JobRunnerImpl(theJobCoordinator, theFhirContext);
-	}
 }
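With the batch2JobRunner bean gone, nothing registers an IBatch2JobRunner any more; components that previously depended on the facade are expected to inject the Batch2 IJobCoordinator themselves. A hypothetical consumer configuration (MyExportCtx and MyExportService are invented purely to illustrate the wiring this removal implies):

	import ca.uhn.fhir.batch2.api.IJobCoordinator;
	import org.springframework.context.annotation.Bean;
	import org.springframework.context.annotation.Configuration;

	@Configuration
	public class MyExportCtx {

		// Hypothetical service that previously took the IBatch2JobRunner facade
		public static class MyExportService {
			private final IJobCoordinator myJobCoordinator;

			public MyExportService(IJobCoordinator theJobCoordinator) {
				myJobCoordinator = theJobCoordinator;
			}
		}

		@Bean
		public MyExportService myExportService(IJobCoordinator theJobCoordinator) {
			return new MyExportService(theJobCoordinator);
		}
	}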
@ -1,6 +1,6 @@
|
|||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR Storage api
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
|
||||
* %%
|
||||
|
@ -17,8 +17,13 @@
|
|||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.jpa.bulk.export.provider;
|
||||
package ca.uhn.fhir.batch2.jobs.export;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.api.JobOperationResultJson;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
|
@ -27,17 +32,11 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
|
|||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
|
||||
import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
|
||||
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.util.BulkExportUtils;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.model.primitive.StringDt;
|
||||
import ca.uhn.fhir.rest.annotation.IdParam;
|
||||
|
@ -47,28 +46,21 @@ import ca.uhn.fhir.rest.api.CacheControlDirective;
|
|||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.PreferHeader;
|
||||
import ca.uhn.fhir.rest.api.RequestTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
||||
import ca.uhn.fhir.rest.server.RestfulServerUtils;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
|
||||
import ca.uhn.fhir.rest.server.interceptor.auth.IRuleApplier;
|
||||
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.util.ArrayUtil;
|
||||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import ca.uhn.fhir.util.OperationOutcomeUtil;
|
||||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import ca.uhn.fhir.util.UrlUtil;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.commons.collections4.ListUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
@ -83,6 +75,7 @@ import javax.servlet.http.HttpServletResponse;
|
|||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
|
@ -91,16 +84,18 @@ import java.util.Map;
|
|||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.collections4.CollectionUtils.isEmpty;
|
||||
import static ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters.ExportStyle;
|
||||
import static ca.uhn.fhir.util.DatatypeUtil.toStringValue;
|
||||
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
|
||||
import static org.apache.commons.lang3.StringUtils.isEmpty;
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
|
||||
|
||||
public class BulkDataExportProvider {
|
||||
public static final String FARM_TO_TABLE_TYPE_FILTER_REGEX = "(?:,)(?=[A-Z][a-z]+\\?)";
|
||||
public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization");
|
||||
/** Bulk data $export does not include the Binary type */
|
||||
/**
|
||||
* Bulk data $export does not include the Binary type
|
||||
*/
|
||||
public static final String UNSUPPORTED_BINARY_TYPE = "Binary";
|
||||
private static final Logger ourLog = getLogger(BulkDataExportProvider.class);
|
||||
|
||||
|
@ -114,7 +109,7 @@ public class BulkDataExportProvider {
|
|||
private FhirContext myFhirContext;
|
||||
|
||||
@Autowired
|
||||
private IBatch2JobRunner myJobRunner;
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
|
||||
@Autowired
|
||||
private JpaStorageSettings myStorageSettings;
|
||||
|
@ -141,15 +136,15 @@ public class BulkDataExportProvider {
|
|||
// JPA export provider
|
||||
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);
|
||||
|
||||
BulkDataExportOptions bulkDataExportOptions = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
|
||||
BulkExportJobParameters BulkExportJobParameters = buildSystemBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, theTypePostFetchFilterUrl);
|
||||
|
||||
startJob(theRequestDetails, bulkDataExportOptions);
|
||||
startJob(theRequestDetails, BulkExportJobParameters);
|
||||
}
|
||||
|
||||
private void startJob(ServletRequestDetails theRequestDetails,
|
||||
BulkDataExportOptions theOptions) {
|
||||
BulkExportJobParameters theOptions) {
|
||||
// permission check
|
||||
HookParams params = (new HookParams()).add(BulkDataExportOptions.class, theOptions)
|
||||
HookParams params = (new HookParams()).add(BulkExportJobParameters.class, theOptions)
|
||||
.add(RequestDetails.class, theRequestDetails)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
|
||||
CompositeInterceptorBroadcaster.doCallHooks(this.myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params);
|
||||
|
@ -157,36 +152,29 @@ public class BulkDataExportProvider {
|
|||
// get cache boolean
|
||||
boolean useCache = shouldUseCache(theRequestDetails);
|
||||
|
||||
BulkExportParameters parameters = BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions);
|
||||
parameters.setUseExistingJobsFirst(useCache);
|
||||
|
||||
// Set the original request URL as part of the job information, as this is used in the poll-status-endpoint, and is needed for the report.
|
||||
parameters.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
|
||||
theOptions.setOriginalRequestUrl(theRequestDetails.getCompleteUrl());
|
||||
|
||||
// If no _type parameter is provided, default to all resource types except Binary
|
||||
if (isEmpty(theOptions.getResourceTypes())) {
|
||||
if (theOptions.getResourceTypes().isEmpty()) {
|
||||
List<String> resourceTypes = new ArrayList<>(myDaoRegistry.getRegisteredDaoTypes());
|
||||
resourceTypes.remove(UNSUPPORTED_BINARY_TYPE);
|
||||
parameters.setResourceTypes(resourceTypes);
|
||||
theOptions.setResourceTypes(resourceTypes);
|
||||
}
|
||||
|
||||
// Determine and validate partition permissions (if needed).
|
||||
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null);
|
||||
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
|
||||
parameters.setPartitionId(partitionId);
|
||||
theOptions.setPartitionId(partitionId);
|
||||
|
||||
// start job
|
||||
Batch2JobStartResponse response = myJobRunner.startNewJob(theRequestDetails, parameters);
|
||||
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
|
||||
startRequest.setParameters(theOptions);
|
||||
startRequest.setUseCache(useCache);
|
||||
startRequest.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
|
||||
Batch2JobStartResponse response = myJobCoordinator.startInstance(theRequestDetails, startRequest);
|
||||
|
||||
JobInfo info = new JobInfo();
|
||||
info.setJobMetadataId(response.getInstanceId());
|
||||
|
||||
// We set it to submitted, even if it's using a cached job
|
||||
// This isn't an issue because the actual status isn't used
|
||||
// instead, when they poll for results, they'll get the real one
|
||||
info.setStatus(BulkExportJobStatusEnum.SUBMITTED);
|
||||
|
||||
writePollingLocationToResponseHeaders(theRequestDetails, info);
|
||||
writePollingLocationToResponseHeaders(theRequestDetails, response.getInstanceId());
|
||||
}
|
||||
|
||||
private boolean shouldUseCache(ServletRequestDetails theRequestDetails) {
|
||||
|
@ -224,16 +212,18 @@ public class BulkDataExportProvider {
|
|||
// verify the Group exists before starting the job
|
||||
validateTargetsExists(theRequestDetails, "Group", List.of(theIdParam));
|
||||
|
||||
BulkDataExportOptions bulkDataExportOptions = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl);
|
||||
BulkExportJobParameters BulkExportJobParameters = buildGroupBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theIdParam, theMdm, theExportIdentifier, theTypePostFetchFilterUrl);
|
||||
|
||||
if (isNotEmpty(bulkDataExportOptions.getResourceTypes())) {
|
||||
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
|
||||
if (isNotEmpty(BulkExportJobParameters.getResourceTypes())) {
|
||||
validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes());
|
||||
} else {
|
||||
// all patient resource types
|
||||
bulkDataExportOptions.setResourceTypes(getPatientCompartmentResources());
|
||||
Set<String> groupTypes = new HashSet<>(getPatientCompartmentResources());
|
||||
groupTypes.removeIf(t-> !myDaoRegistry.isResourceTypeSupported(t));
|
||||
BulkExportJobParameters.setResourceTypes(groupTypes);
|
||||
}
|
||||
|
||||
startJob(theRequestDetails, bulkDataExportOptions);
|
||||
startJob(theRequestDetails, BulkExportJobParameters);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -247,14 +237,14 @@ public class BulkDataExportProvider {
private void validateTargetsExists(RequestDetails theRequestDetails, String theTargetResourceName, Iterable<IIdType> theIdParams) {
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequestDetails, theTargetResourceName, theIdParams.iterator().next());
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
for (IIdType nextId: theIdParams) {
for (IIdType nextId : theIdParams) {
myDaoRegistry.getResourceDao(theTargetResourceName)
.read(nextId, requestDetails);
}

}

private void validateResourceTypesAllContainPatientSearchParams(Set<String> theResourceTypes) {
private void validateResourceTypesAllContainPatientSearchParams(Collection<String> theResourceTypes) {
if (theResourceTypes != null) {
List<String> badResourceTypes = theResourceTypes.stream()
.filter(resourceType -> !PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resourceType))
@ -292,13 +282,13 @@ public class BulkDataExportProvider {
validatePreferAsyncHeader(theRequestDetails, JpaConstants.OPERATION_EXPORT);

if (thePatient != null) {
validateTargetsExists(theRequestDetails, "Patient", Lists.transform(thePatient, s -> new IdDt(s.getValue())));
validateTargetsExists(theRequestDetails, "Patient", thePatient.stream().map(s -> new IdDt(s.getValue())).collect(Collectors.toList()));
}

BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, thePatient, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes());

startJob(theRequestDetails, bulkDataExportOptions);
startJob(theRequestDetails, BulkExportJobParameters);
}

/**
@ -319,15 +309,16 @@ public class BulkDataExportProvider {

validateTargetsExists(theRequestDetails, "Patient", List.of(theIdParam));

BulkDataExportOptions bulkDataExportOptions = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(bulkDataExportOptions.getResourceTypes());
BulkExportJobParameters BulkExportJobParameters = buildPatientBulkExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, theIdParam, theTypePostFetchFilterUrl);
validateResourceTypesAllContainPatientSearchParams(BulkExportJobParameters.getResourceTypes());

startJob(theRequestDetails, bulkDataExportOptions);
startJob(theRequestDetails, BulkExportJobParameters);
}

/**
* $export-poll-status
*/
@SuppressWarnings("unchecked")
@Operation(name = JpaConstants.OPERATION_EXPORT_POLL_STATUS, manualResponse = true, idempotent = true, deleteEnabled = true)
public void exportPollStatus(
@OperationParam(name = JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, typeName = "string", min = 0, max = 1) IPrimitiveType<String> theJobId,
@ -338,7 +329,7 @@ public class BulkDataExportProvider {

// When export-poll-status through POST
// Get theJobId from the request details
if (theJobId == null){
if (theJobId == null) {
Parameters parameters = (Parameters) theRequestDetails.getResource();
Parameters.ParametersParameterComponent parameter = parameters.getParameter().stream()
.filter(param -> param.getName().equals(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID))
@ -347,22 +338,20 @@ public class BulkDataExportProvider {
theJobId = (IPrimitiveType<String>) parameter.getValue();
}

Batch2JobInfo info = myJobRunner.getJobInfo(theJobId.getValueAsString());
if (info == null) {
throw new ResourceNotFoundException(Msg.code(2040) + "Unknown instance ID: " + theJobId + ". Please check if the input job ID is valid.");
}
JobInstance info = myJobCoordinator.getInstance(theJobId.getValueAsString());

if(info.getRequestPartitionId() != null) {
BulkExportJobParameters parameters = info.getParameters(BulkExportJobParameters.class);
if (parameters.getPartitionId() != null) {
// Determine and validate permissions for partition (if needed)
RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null);
myRequestPartitionHelperService.validateHasPartitionPermissions(theRequestDetails, "Binary", partitionId);
if(!info.getRequestPartitionId().equals(partitionId)){
if (!parameters.getPartitionId().equals(partitionId)) {
throw new InvalidRequestException(Msg.code(2304) + "Invalid partition in request for Job ID " + theJobId);
}
}

switch (info.getStatus()) {
case COMPLETE:
case COMPLETED:
if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) {
handleDeleteRequest(theJobId, response, info.getStatus());
} else {
@ -405,14 +394,14 @@ public class BulkDataExportProvider {
}
}
break;
case ERROR:
case FAILED:
response.setStatus(Constants.STATUS_HTTP_500_INTERNAL_ERROR);
response.setContentType(Constants.CT_FHIR_JSON);

// Create an OperationOutcome response
IBaseOperationOutcome oo = OperationOutcomeUtil.newInstance(myFhirContext);

OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", info.getErrorMsg(), null, null);
OperationOutcomeUtil.addIssue(myFhirContext, oo, "error", info.getErrorMessage(), null, null);
myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToWriter(oo, response.getWriter());
response.getWriter().close();
break;
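For a failed job, the poll endpoint above answers HTTP 500 with a pretty-printed OperationOutcome along these lines; which issue element OperationOutcomeUtil.addIssue populates with the message is assumed here to be diagnostics:

{
  "resourceType": "OperationOutcome",
  "issue": [ {
    "severity": "error",
    "diagnostics": "<the job's error message>"
  } ]
}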
@ -420,8 +409,11 @@ public class BulkDataExportProvider {
// Deliberate fall through
ourLog.warn("Unrecognized status encountered: {}. Treating as BUILDING/SUBMITTED", info.getStatus().name());
//noinspection fallthrough
case BUILDING:
case SUBMITTED:
case FINALIZE:
case QUEUED:
case IN_PROGRESS:
case CANCELLED:
case ERRORED:
if (theRequestDetails.getRequestType() == RequestTypeEnum.DELETE) {
handleDeleteRequest(theJobId, response, info.getStatus());
} else {
@ -437,10 +429,10 @@ public class BulkDataExportProvider {
}
}

private void handleDeleteRequest(IPrimitiveType<String> theJobId, HttpServletResponse response, BulkExportJobStatusEnum theOrigStatus) throws IOException {
private void handleDeleteRequest(IPrimitiveType<String> theJobId, HttpServletResponse response, StatusEnum theOrigStatus) throws IOException {
IBaseOperationOutcome outcome = OperationOutcomeUtil.newInstance(myFhirContext);
Batch2JobOperationResult resultMessage = myJobRunner.cancelInstance(theJobId.getValueAsString());
if (theOrigStatus.equals(BulkExportJobStatusEnum.COMPLETE)) {
JobOperationResultJson resultMessage = myJobCoordinator.cancelInstance(theJobId.getValueAsString());
if (theOrigStatus.equals(StatusEnum.COMPLETED)) {
response.setStatus(Constants.STATUS_HTTP_404_NOT_FOUND);
OperationOutcomeUtil.addIssue(myFhirContext, outcome, "error", "Job instance <" + theJobId.getValueAsString() + "> was already cancelled or has completed. Nothing to do.", null, null);
} else {
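Cancellation goes through the same poll endpoint with the DELETE verb; a sketch, assuming _jobId is the value of JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID and with an illustrative instance ID:

DELETE [base]/$export-poll-status?_jobId=abc-123

A still-running job is cancelled; per the branch above, a job that has already completed yields HTTP 404 with the "Nothing to do" OperationOutcome.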
@ -451,7 +443,7 @@ public class BulkDataExportProvider {
response.getWriter().close();
}

private String getTransitionTimeOfJobInfo(Batch2JobInfo theInfo) {
private String getTransitionTimeOfJobInfo(JobInstance theInfo) {
if (theInfo.getEndTime() != null) {
return new InstantType(theInfo.getEndTime()).getValueAsString();
} else if (theInfo.getStartTime() != null) {
@ -462,43 +454,43 @@ public class BulkDataExportProvider {
}
}

private BulkDataExportOptions buildSystemBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
return buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
private BulkExportJobParameters buildSystemBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
return buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkExportJobParameters.ExportStyle.SYSTEM, theTypePostFetchFilterUrl);
}

private BulkDataExportOptions buildGroupBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType theGroupId, IPrimitiveType<Boolean> theExpandMdm, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportId, BulkDataExportOptions.ExportStyle.GROUP, theTypePostFetchFilterUrl);
bulkDataExportOptions.setGroupId(theGroupId);
private BulkExportJobParameters buildGroupBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IIdType theGroupId, IPrimitiveType<Boolean> theExpandMdm, IPrimitiveType<String> theExportId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportId, ExportStyle.GROUP, theTypePostFetchFilterUrl);
BulkExportJobParameters.setGroupId(toStringValue(theGroupId));

boolean mdm = false;
if (theExpandMdm != null) {
mdm = theExpandMdm.getValue();
}
bulkDataExportOptions.setExpandMdm(mdm);
BulkExportJobParameters.setExpandMdm(mdm);

return bulkDataExportOptions;
return BulkExportJobParameters;
}

private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, List<IPrimitiveType<String>> thePatientIds, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, List<IPrimitiveType<String>> thePatientIds, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
IPrimitiveType<String> type = theType;
if (type == null) {
// Type is optional, but the job requires it
type = new StringDt("Patient");
}
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, type, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl);
if (thePatientIds != null) {
bulkDataExportOptions.setPatientIds(thePatientIds.stream().map((pid) -> new IdType(pid.getValueAsString())).collect(Collectors.toSet()));
BulkExportJobParameters.setPatientIds(thePatientIds.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toSet()));
}
return bulkDataExportOptions;
return BulkExportJobParameters;
}

private BulkDataExportOptions buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, IIdType thePatientId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkDataExportOptions bulkDataExportOptions = buildBulkDataExportOptions(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, BulkDataExportOptions.ExportStyle.PATIENT, theTypePostFetchFilterUrl);
bulkDataExportOptions.setPatientIds(Collections.singleton(thePatientId));
return bulkDataExportOptions;
private BulkExportJobParameters buildPatientBulkExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, IIdType thePatientId, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
BulkExportJobParameters BulkExportJobParameters = buildBulkExportJobParameters(theOutputFormat, theType, theSince, theTypeFilter, theExportIdentifier, ExportStyle.PATIENT, theTypePostFetchFilterUrl);
BulkExportJobParameters.setPatientIds(Collections.singleton(thePatientId.getValue()));
return BulkExportJobParameters;
}

private BulkDataExportOptions buildBulkDataExportOptions(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, BulkDataExportOptions.ExportStyle theExportStyle, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
private BulkExportJobParameters buildBulkExportJobParameters(IPrimitiveType<String> theOutputFormat, IPrimitiveType<String> theType, IPrimitiveType<Date> theSince, List<IPrimitiveType<String>> theTypeFilter, IPrimitiveType<String> theExportIdentifier, BulkExportJobParameters.ExportStyle theExportStyle, List<IPrimitiveType<String>> theTypePostFetchFilterUrl) {
String outputFormat = theOutputFormat != null ? theOutputFormat.getValueAsString() : Constants.CT_FHIR_NDJSON;

Set<String> resourceTypes = null;
@ -518,23 +510,23 @@ public class BulkDataExportProvider {
Set<String> typeFilters = splitTypeFilters(theTypeFilter);
Set<String> typePostFetchFilterUrls = splitTypeFilters(theTypePostFetchFilterUrl);

BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setFilters(typeFilters);
bulkDataExportOptions.setPostFetchFilterUrls(typePostFetchFilterUrls);
bulkDataExportOptions.setExportStyle(theExportStyle);
bulkDataExportOptions.setExportIdentifier(exportIdentifier);
bulkDataExportOptions.setSince(since);
bulkDataExportOptions.setResourceTypes(resourceTypes);
bulkDataExportOptions.setOutputFormat(outputFormat);
return bulkDataExportOptions;
BulkExportJobParameters BulkExportJobParameters = new BulkExportJobParameters();
BulkExportJobParameters.setFilters(typeFilters);
BulkExportJobParameters.setPostFetchFilterUrls(typePostFetchFilterUrls);
BulkExportJobParameters.setExportStyle(theExportStyle);
BulkExportJobParameters.setExportIdentifier(exportIdentifier);
BulkExportJobParameters.setSince(since);
BulkExportJobParameters.setResourceTypes(resourceTypes);
BulkExportJobParameters.setOutputFormat(outputFormat);
return BulkExportJobParameters;
}

public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, JobInfo theOutcome) {
public void writePollingLocationToResponseHeaders(ServletRequestDetails theRequestDetails, String theInstanceId) {
String serverBase = getServerBase(theRequestDetails);
if (serverBase == null) {
throw new InternalErrorException(Msg.code(2136) + "Unable to get the server base.");
}
String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theOutcome.getJobMetadataId();
String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + theInstanceId;
pollLocation = UrlUtil.sanitizeHeaderValue(pollLocation);

HttpServletResponse response = theRequestDetails.getServletResponse();
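The poll location built here typically surfaces to the client as a Content-Location header on the kickoff response, roughly as follows; the server base and instance ID are illustrative, and _jobId is assumed to be the value of JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID:

Content-Location: https://example.org/fhir/$export-poll-status?_jobId=<instance-id>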
@ -559,20 +551,12 @@ public class BulkDataExportProvider {
Arrays
.stream(typeFilterString.split(FARM_TO_TABLE_TYPE_FILTER_REGEX))
.filter(StringUtils::isNotBlank)
.forEach(t -> retVal.add(t));
.forEach(retVal::add);
}

return retVal;
}

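As a worked example, assuming FARM_TO_TABLE_TYPE_FILTER_REGEX splits on the commas separating filter expressions, a parameter such as _typeFilter=Patient?active=true,Observation?status=final is split into the two filters Patient?active=true and Observation?status=final, with blank fragments discarded by the isNotBlank filter.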
public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
if (!prefer.getRespondAsync()) {
throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
}
}

@VisibleForTesting
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
@ -582,4 +566,12 @@ public class BulkDataExportProvider {
public void setDaoRegistry(DaoRegistry theDaoRegistry) {
myDaoRegistry = theDaoRegistry;
}

public static void validatePreferAsyncHeader(ServletRequestDetails theRequestDetails, String theOperationName) {
String preferHeader = theRequestDetails.getHeader(Constants.HEADER_PREFER);
PreferHeader prefer = RestfulServerUtils.parsePreferHeader(null, preferHeader);
if (!prefer.getRespondAsync()) {
throw new InvalidRequestException(Msg.code(513) + "Must request async processing for " + theOperationName);
}
}
}
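Every kickoff request validated by this helper must carry the asynchronous-processing header; without it the server rejects the call with Msg.code 513 (HTTP 400). The required header is simply:

Prefer: respond-async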
@ -23,7 +23,7 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.model.api.IModelJson;
@ -27,7 +27,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.ChunkOutcome;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
@ -20,15 +20,13 @@
package ca.uhn.fhir.batch2.jobs.export;

import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;

@ -83,7 +81,7 @@ public class BulkExportJobParametersValidator implements IJobParametersValidator
}

// validate for group
BulkDataExportOptions.ExportStyle style = theParameters.getExportStyle();
BulkExportJobParameters.ExportStyle style = theParameters.getExportStyle();
if (style == null) {
errorMsgs.add("Export style is required");
}
@ -1,56 +0,0 @@
/*-
 * #%L
 * hapi-fhir-storage-batch2-jobs
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.batch2.jobs.export;

import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.slf4j.Logger;

import static org.slf4j.LoggerFactory.getLogger;

public class BulkExportUtil {
private static final Logger ourLog = getLogger(BulkExportUtil.class);

private BulkExportUtil() {

}

/**
 * Converts Batch2 StatusEnum -> BulkExportJobStatusEnum
 */
public static BulkExportJobStatusEnum fromBatchStatus(StatusEnum status) {
switch (status) {
case QUEUED:
case FINALIZE:
return BulkExportJobStatusEnum.SUBMITTED;
case COMPLETED :
return BulkExportJobStatusEnum.COMPLETE;
case ERRORED:
case IN_PROGRESS:
return BulkExportJobStatusEnum.BUILDING;
default:
ourLog.warn("Unrecognized status {}; treating as FAILED/CANCELLED/ERRORED", status.name());
case FAILED:
case CANCELLED:
return BulkExportJobStatusEnum.ERROR;
}
}
}
@ -24,7 +24,10 @@ import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.context.FhirContext;
@ -90,6 +93,9 @@ public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParamete
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;

@Autowired
private InterceptorService myInterceptorService;

private volatile ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc;

@Nonnull
@ -113,6 +119,7 @@ public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParamete
.stream()
.filter(t -> t.substring(0, t.indexOf('?')).equals(resourceType))
.collect(Collectors.toList());

if (!postFetchFilterUrls.isEmpty()) {
applyPostFetchFiltering(allResources, postFetchFilterUrls, instanceId, chunkId);
}
@ -132,6 +139,19 @@ public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParamete
terminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources);
}

// Interceptor call
if (myInterceptorService.hasHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)) {
for (Iterator<IBaseResource> iter = allResources.iterator(); iter.hasNext(); ) {
HookParams params = new HookParams()
.add(BulkExportJobParameters.class, theStepExecutionDetails.getParameters())
.add(IBaseResource.class, iter.next());
boolean outcome = myInterceptorService.callHooks(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION, params);
if (!outcome) {
iter.remove();
}
}
}

// encode them - Key is resource type, Value is a collection of serialized resources of that type
ListMultimap<String, String> resources = encodeToString(allResources, parameters);
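The hunk above wires the new STORAGE_BULK_EXPORT_RESOURCE_INCLUSION pointcut into the expand step: each loaded resource is offered to registered hooks, and a false return removes it from the export file. A minimal interceptor sketch follows; the class and its tag-based rule are hypothetical, not part of this change:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;

@Interceptor
public class BulkExportInclusionInterceptor {

	// Called once per resource that is about to be written to an export file.
	// Returning false filters the resource out; returning true keeps it.
	@Hook(Pointcut.STORAGE_BULK_EXPORT_RESOURCE_INCLUSION)
	public boolean filterResource(BulkExportJobParameters theParams, IBaseResource theResource) {
		// Hypothetical rule: drop any resource carrying a "test-data" tag
		return theResource.getMeta().getTag().stream()
				.noneMatch(t -> "test-data".equals(t.getCode()));
	}
}

Registering an instance of this class with the server's interceptor registry is enough; the hasHooks() guard above means the per-resource loop only runs when at least one such hook exists.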
@ -25,7 +25,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.i18n.Msg;
@ -25,7 +25,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
@ -59,7 +59,7 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;

import static ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider.validatePreferAsyncHeader;
import static ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider.validatePreferAsyncHeader;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class BulkDataImportProvider {
@ -1,136 +0,0 @@
/*-
 * #%L
 * hapi-fhir-storage-batch2-jobs
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.batch2.jobs.services;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.JobOperationResultJson;
import ca.uhn.fhir.batch2.jobs.export.BulkExportUtil;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import org.slf4j.Logger;

import javax.annotation.Nonnull;

import static org.slf4j.LoggerFactory.getLogger;

public class Batch2JobRunnerImpl implements IBatch2JobRunner {
private static final Logger ourLog = getLogger(IBatch2JobRunner.class);

private final IJobCoordinator myJobCoordinator;

private final FhirContext myFhirContext;

public Batch2JobRunnerImpl(IJobCoordinator theJobCoordinator, FhirContext theFhirContext) {
myFhirContext = theFhirContext;
myJobCoordinator = theJobCoordinator;
}

@Override
public Batch2JobStartResponse startNewJob(RequestDetails theRequestDetails, Batch2BaseJobParameters theParameters) {
switch (theParameters.getJobDefinitionId()) {
case Batch2JobDefinitionConstants.BULK_EXPORT:
if (theParameters instanceof BulkExportParameters) {
return startBatch2BulkExportJob(theRequestDetails, (BulkExportParameters) theParameters);
}
else {
ourLog.error("Invalid parameters for " + Batch2JobDefinitionConstants.BULK_EXPORT);
}
break;
default:
// Dear future devs - add your case above
ourLog.error("Invalid JobDefinitionId " + theParameters.getJobDefinitionId());
break;
}
return null;
}

@Override
public Batch2JobInfo getJobInfo(String theJobId) {
JobInstance instance = myJobCoordinator.getInstance(theJobId);
if (instance == null) {
throw new ResourceNotFoundException(Msg.code(2240) + " : " + theJobId);
}
return fromJobInstanceToBatch2JobInfo(instance);
}

@Override
public Batch2JobOperationResult cancelInstance(String theJobId) throws ResourceNotFoundException {
JobOperationResultJson cancelResult = myJobCoordinator.cancelInstance(theJobId);
if (cancelResult == null) {
throw new ResourceNotFoundException(Msg.code(2195) + " : " + theJobId);
}
return fromJobOperationResultToBatch2JobOperationResult(cancelResult);
}

private Batch2JobOperationResult fromJobOperationResultToBatch2JobOperationResult(@Nonnull JobOperationResultJson theResultJson) {
Batch2JobOperationResult result = new Batch2JobOperationResult();
result.setOperation(theResultJson.getOperation());
result.setMessage(theResultJson.getMessage());
result.setSuccess(theResultJson.getSuccess());
return result;
}

private Batch2JobInfo fromJobInstanceToBatch2JobInfo(@Nonnull JobInstance theInstance) {
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(theInstance.getInstanceId());
// should convert this to a more generic enum for all batch2 (which is what it seems like)
// or use the status enum only (combine with bulk export enum)
// on the Batch2JobInfo
info.setStatus(BulkExportUtil.fromBatchStatus(theInstance.getStatus()));
info.setCancelled(theInstance.isCancelled());
info.setStartTime(theInstance.getStartTime());
info.setEndTime(theInstance.getEndTime());
info.setReport(theInstance.getReport());
info.setErrorMsg(theInstance.getErrorMessage());
info.setCombinedRecordsProcessed(theInstance.getCombinedRecordsProcessed());
if ( Batch2JobDefinitionConstants.BULK_EXPORT.equals(theInstance.getJobDefinitionId())) {
BulkExportJobParameters parameters = theInstance.getParameters(BulkExportJobParameters.class);
info.setRequestPartitionId(parameters.getPartitionId());
}

return info;
}

private Batch2JobStartResponse startBatch2BulkExportJob(RequestDetails theRequestDetails, BulkExportParameters theParameters) {
JobInstanceStartRequest request = createStartRequest(theParameters);
BulkExportJobParameters parameters = BulkExportJobParameters.createFromExportJobParameters(theParameters);
request.setParameters(parameters);
return myJobCoordinator.startInstance(theRequestDetails, request);
}

private JobInstanceStartRequest createStartRequest(Batch2BaseJobParameters theParameters) {
JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(theParameters.getJobDefinitionId());
request.setUseCache(theParameters.isUseExistingJobsFirst());
return request;
}
}
@ -1,10 +1,9 @@
package ca.uhn.fhir.batch2.jobs.export;

import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
@ -39,7 +38,7 @@ public class BulkExportJobParametersValidatorTest {
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
parameters.setOutputFormat(Constants.CT_FHIR_NDJSON);
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
return parameters;
}

@ -96,7 +95,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_validParametersForPatient_returnsEmptyList() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);

// when
when(myDaoRegistry.isResourceTypeSupported(anyString()))

@ -115,7 +114,7 @@ public class BulkExportJobParametersValidatorTest {
// setup
String resourceType = "notValid";
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.SYSTEM);
parameters.setResourceTypes(Collections.singletonList(resourceType));

// test

@ -131,7 +130,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_validateParametersForGroup_returnsEmptyList() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);
parameters.setGroupId("groupId");
parameters.setExpandMdm(true);


@ -151,7 +150,7 @@ public class BulkExportJobParametersValidatorTest {
public void validate_groupParametersWithoutGroupId_returnsError() {
// setup
BulkExportJobParameters parameters = createSystemExportParameters();
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.GROUP);

// test
List<String> result = myValidator.validate(null, parameters);
@ -4,7 +4,8 @@ package ca.uhn.fhir.batch2.jobs.export;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;

@ -21,7 +22,6 @@ import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;

@ -67,6 +67,9 @@ public class ExpandResourcesStepTest {
@Mock
IIdHelperService<JpaPid> myIdHelperService;

@Spy
private InterceptorService myInterceptorService = new InterceptorService();

@Spy
private FhirContext myFhirContext = FhirContext.forR4Cached();

@ -82,7 +85,7 @@ public class ExpandResourcesStepTest {
private BulkExportJobParameters createParameters(boolean thePartitioned) {
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
parameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
parameters.setOutputFormat("json");
parameters.setSince(new Date());
if (thePartitioned) {
@ -4,7 +4,7 @@ import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ResourceIdList;
import ca.uhn.fhir.batch2.jobs.models.BatchResourceId;
import ca.uhn.fhir.batch2.model.JobInstance;

@ -13,7 +13,6 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;

@ -77,7 +76,7 @@ public class FetchResourceIdsStepTest {
BulkExportJobParameters jobParameters = new BulkExportJobParameters();
jobParameters.setSince(new Date());
jobParameters.setOutputFormat("json");
jobParameters.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
jobParameters.setExportStyle(BulkExportJobParameters.ExportStyle.PATIENT);
jobParameters.setResourceTypes(Arrays.asList("Patient", "Observation"));
if (thePartitioned) {
jobParameters.setPartitionId(RequestPartitionId.fromPartitionName("Partition-A"));
@ -6,7 +6,7 @@ import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.export.models.ExpandedResourcesList;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.context.FhirContext;
@ -1,198 +0,0 @@
package ca.uhn.fhir.batch2.jobs.services;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.read.ListAppender;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.LoggerFactory;

import java.util.Collections;
import java.util.Date;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
public class Batch2JobRunnerImplTest {

private static final Logger ourLog = (Logger) LoggerFactory.getLogger(IBatch2JobRunner.class);

@Mock
private ListAppender<ILoggingEvent> myAppender;

@Mock
private IJobCoordinator myJobCoordinator;

@InjectMocks
private Batch2JobRunnerImpl myJobRunner;

@BeforeEach
public void init() {
ourLog.addAppender(myAppender);
}

@AfterEach
public void end() {
ourLog.detachAppender(myAppender);
}

@Test
public void startJob_invalidJobDefinitionId_logsAndDoesNothing() {
// setup
String jobId = "invalid";
ourLog.setLevel(Level.ERROR);

// test
myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(jobId));

// verify
ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);
verify(myAppender).doAppend(captor.capture());
assertTrue(captor.getValue().getMessage()
.contains("Invalid JobDefinitionId " + jobId));
verify(myJobCoordinator, never())
.startInstance(any(JobInstanceStartRequest.class));
}

@Test
public void startJob_invalidParametersForExport_logsAndDoesNothing() {
// setup
ourLog.setLevel(Level.ERROR);

// test
myJobRunner.startNewJob(new SystemRequestDetails(), new Batch2BaseJobParameters(Batch2JobDefinitionConstants.BULK_EXPORT));

// verify
ArgumentCaptor<ILoggingEvent> captor = ArgumentCaptor.forClass(ILoggingEvent.class);
verify(myAppender).doAppend(captor.capture());
String msg = captor.getValue().getMessage();
String expectedMsg = "Invalid parameters for " + Batch2JobDefinitionConstants.BULK_EXPORT;
assertTrue(msg
.contains(expectedMsg),
msg + " != " + expectedMsg);
verify(myJobCoordinator, never())
.startInstance(any(JobInstanceStartRequest.class));
}

@Test
public void startJob_bulkExport_callsAsExpected() {
// setup
BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
parameters.setResourceTypes(Collections.singletonList("Patient"));

// when
String jobInstanceId = "test_job_instance";
Date end = new Date();
Date start = new Date(end.getTime()-100);
JobInstance mockJobInstance = createMockJobInstance(parameters, jobInstanceId, start, end);
when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);

// test
myJobRunner.startNewJob(new SystemRequestDetails(), parameters);

// verify
ArgumentCaptor<JobInstanceStartRequest> captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
verify(myJobCoordinator)
.startInstance(isNotNull(), captor.capture());
JobInstanceStartRequest val = captor.getValue();
// we need to verify something in the parameters
ourLog.info(val.getParameters());
assertTrue(val.getParameters().contains("Patient"));
assertFalse(val.getParameters().contains("allPartitions"));
assertFalse(val.getParameters().contains("Partition-A"));

Batch2JobInfo jobInfo = myJobRunner.getJobInfo(jobInstanceId);
verifyBatch2JobInfo(jobInfo, jobInstanceId, start, end, null);
}

private JobInstance createMockJobInstance(BulkExportParameters theParameters, String theJobInstanceId, Date start, Date end) {
JobInstance mockJobInstance = new JobInstance();
mockJobInstance.setInstanceId(theJobInstanceId);
mockJobInstance.setStatus(StatusEnum.COMPLETED);
mockJobInstance.setCancelled(false);
mockJobInstance.setStartTime(start);
mockJobInstance.setEndTime(end);
mockJobInstance.setReport("test report");
mockJobInstance.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
mockJobInstance.setParameters(BulkExportJobParameters.createFromExportJobParameters(theParameters));
return mockJobInstance;
}

private void verifyBatch2JobInfo(Batch2JobInfo jobInfo, String theJobId, Date start, Date end, RequestPartitionId partitionId) {
assertEquals(jobInfo.getJobId(), theJobId );
assertFalse(jobInfo.isCancelled());
assertEquals(jobInfo.getStartTime(), start);
assertEquals(jobInfo.getEndTime(), end);
assertEquals(jobInfo.getReport(), "test report");
assertEquals(jobInfo.getStatus(), BulkExportJobStatusEnum.COMPLETE);
if (partitionId != null) {
assertEquals(jobInfo.getRequestPartitionId(), partitionId);
} else {
assertNull(jobInfo.getRequestPartitionId());
}
}

@Test
public void startJob_bulkExport_partitioned() {
// setup
BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
parameters.setResourceTypes(Collections.singletonList("Patient"));
RequestPartitionId partitionId = RequestPartitionId.fromPartitionName("Partition-A");
parameters.setPartitionId(partitionId);

// when
String jobInstanceId = "test_job_instance";
Date end = new Date();
Date start = new Date(end.getTime()-100);
JobInstance mockJobInstance = createMockJobInstance(parameters, jobInstanceId, start, end);
when(myJobCoordinator.getInstance(eq(jobInstanceId))).thenReturn(mockJobInstance);

// test
myJobRunner.startNewJob(new SystemRequestDetails(), parameters);

// verify
ArgumentCaptor<JobInstanceStartRequest> captor = ArgumentCaptor.forClass(JobInstanceStartRequest.class);
verify(myJobCoordinator)
.startInstance(isNotNull(), captor.capture());
JobInstanceStartRequest val = captor.getValue();
// we need to verify something in the parameters
ourLog.info(val.getParameters());
assertTrue(val.getParameters().contains("Patient"));
assertTrue(val.getParameters().contains("Partition-A"));
assertTrue(val.getParameters().contains("\"allPartitions\":false"));

Batch2JobInfo jobInfo = myJobRunner.getJobInfo(jobInstanceId);
verifyBatch2JobInfo(jobInfo, jobInstanceId, start, end, partitionId);

}

}
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.10-SNAPSHOT</version>
<version>6.7.11-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.10-SNAPSHOT</version>
<version>6.7.11-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -29,6 +29,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.springframework.data.domain.Page;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Set;

@ -67,6 +68,7 @@ public interface IJobCoordinator {
 * @return Returns the current instance details
 * @throws ResourceNotFoundException If the instance ID can not be found
 */
@Nonnull
JobInstance getInstance(String theInstanceId) throws ResourceNotFoundException;

/**
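With getInstance now annotated @Nonnull, callers no longer branch on a null return; an unknown instance ID surfaces as the declared exception instead. A minimal calling sketch, with illustrative identifiers:

try {
	JobInstance instance = theJobCoordinator.getInstance(theInstanceId);
	// ... inspect status, parameters, report ...
} catch (ResourceNotFoundException e) {
	// unknown instance ID
}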