3531 implement bulk processors (#3566)

* base batch2 bulk export steps - unimplemented

* added the basic steps

* adding tests

* we have added the tests

* added tests for bulk export stages

* implementing jpa

* 3531 adding the JPA bulk export processor implementation

* 3531 first step for mongo bulk export

* jpa stuff updated

* 3531 hapi fhir stuff

* trying to fix up tests

* fixing test

* fixed most tests in batch export

* deprecate

* need some more guidance here

* fixing some tests

* some steps taken

* blah

* stashing

* test

* fixing tests round 1

* many a change

* updating version

* small updates

* merging in master again

* fixing up serialization

* added the permission checkers

* cleanup and SonarQube fixes

* some more tests

* updating adding tests

* crimes

* blah

* updating msg number again

* removal of commented code

* cleanup

* merge conflicts

* fixing msg

* review fixes round 1

* more review fixes

* merging in master

* fixing poms

* review points

* test fix

* test fix

* fix test race condition

* truncate error

* add stack trace to error

* rename to IT

* fix race condition in test

* finally fixed race condition in test. this time I'm optimistic HAPI CI will pass

* fix mock test. not a fan of mock tests like this.

* any job definition with a reducer step must be gated (see the illustrative validation sketch after this list).

* fixed common batch bean wiring

* fix test

* lazy init ResponseTerminologyTranslationSvc

* change the way we lazy init ResponseTerminologyTranslationSvc

* change the way we lazy init ResponseTerminologyTranslationSvc (for the third time. this is super tricky! see the lazy-init sketch after this list)

* disabling testGoldenResourceWithCircularReferenceCanBeCleared for now
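For the note above about reducer steps ("any job definition with a reducer step must be gated"), here is a minimal sketch of that rule expressed as a validation check. This is illustrative only and not the HAPI FHIR batch2 API: JobDefinition, hasReducerStep() and isGatedExecution() are hypothetical stand-ins for whatever the framework actually exposes.

// Illustrative sketch only: the "reducer steps require gated execution" rule from the
// commit message, expressed as a standalone check. All type and method names here are
// hypothetical placeholders, not the real batch2 API.
final class JobDefinitionValidator {

	static void validate(JobDefinition theDefinition) {
		// A reducer consumes the output of every chunk of the preceding step, so the job
		// must advance step by step (gated) rather than letting chunks race ahead.
		if (theDefinition.hasReducerStep() && !theDefinition.isGatedExecution()) {
			throw new IllegalArgumentException(
				"Job definition " + theDefinition.getId() + " declares a reducer step and must therefore use gated execution");
		}
	}

	// Minimal hypothetical shape of a job definition, just enough for the check above.
	interface JobDefinition {
		String getId();

		boolean hasReducerStep();

		boolean isGatedExecution();
	}
}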

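The three "lazy init ResponseTerminologyTranslationSvc" notes above refer to deferring creation of that service until it is first needed. As a rough illustration (not the actual change in this commit), two common Spring patterns for lazy initialization are sketched below; the ResponseTerminologyTranslationSvc and SomeConsumer types here are placeholder classes defined inside the sketch, not the real HAPI FHIR beans or their constructors.

import org.springframework.beans.factory.ObjectProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import java.util.function.Supplier;

// Illustrative sketch only; placeholder types, not the real HAPI FHIR wiring.
@Configuration
class LazyInitSketch {

	// Option 1: mark the bean itself lazy so it is only created on first access.
	@Bean
	@Lazy
	ResponseTerminologyTranslationSvc responseTerminologyTranslationSvc() {
		return new ResponseTerminologyTranslationSvc();
	}

	// Option 2: inject a provider and resolve on demand, so consumers created at startup
	// still do not force the service to be built eagerly.
	@Bean
	SomeConsumer someConsumer(ObjectProvider<ResponseTerminologyTranslationSvc> theProvider) {
		return new SomeConsumer(theProvider::getObject);
	}

	// Placeholder types so the sketch is self-contained.
	static class ResponseTerminologyTranslationSvc {
	}

	static class SomeConsumer {
		private final Supplier<ResponseTerminologyTranslationSvc> mySvc;

		SomeConsumer(Supplier<ResponseTerminologyTranslationSvc> theSvc) {
			mySvc = theSvc;
		}
	}
}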
Co-authored-by: leif stawnyczy <leifstawnyczy@leifs-MacBook-Pro.local>
Co-authored-by: Ken Stevens <ken@smilecdr.com>
TipzCM 2022-07-17 17:49:03 -04:00 committed by GitHub
parent 398ed99f86
commit 8ed0a18ef4
180 changed files with 5633 additions and 4918 deletions

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -25,7 +25,7 @@ public final class Msg {
 	/**
 	 * IMPORTANT: Please update the following comment after you add a new code
-	 * Last code value: 2101
+	 * Last code value: 2106
 	 */
 	private Msg() {}

View File

@@ -28,10 +28,12 @@ import java.util.HashMap;
  */
 public enum PreferReturnEnum {
-	REPRESENTATION(Constants.HEADER_PREFER_RETURN_REPRESENTATION), MINIMAL(Constants.HEADER_PREFER_RETURN_MINIMAL), OPERATION_OUTCOME(Constants.HEADER_PREFER_RETURN_OPERATION_OUTCOME);
+	REPRESENTATION(Constants.HEADER_PREFER_RETURN_REPRESENTATION),
+	MINIMAL(Constants.HEADER_PREFER_RETURN_MINIMAL),
+	OPERATION_OUTCOME(Constants.HEADER_PREFER_RETURN_OPERATION_OUTCOME);

 	private static HashMap<String, PreferReturnEnum> ourValues;
-	private String myHeaderValue;
+	private final String myHeaderValue;

 	PreferReturnEnum(String theHeaderValue) {
 		myHeaderValue = theHeaderValue;

View File

@@ -71,9 +71,6 @@ ca.uhn.fhir.validation.FhirValidator.noPhError=Ph-schematron library not found o
 ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected during validation

 # JPA Messages
-ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
-ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -49,7 +49,6 @@ public class NonPersistedBatchConfigurer extends DefaultBatchConfigurer {
 		return myHapiPlatformTransactionManager;
 	}

 	@Override
 	protected JobRepository createJobRepository() throws Exception {
 		MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean();

View File

@@ -3,14 +3,14 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>6.1.0-PRE14-SNAPSHOT</version>
+	<version>6.1.0-PRE15-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -36,46 +36,8 @@
 		</dependency>
 	</dependencies>
-	<profiles>
-		<!-- For releases, we need to generate javadoc and sources JAR -->
-		<profile>
-			<id>release</id>
-			<activation>
-				<activeByDefault>false</activeByDefault>
-				<property>
-					<name>deployToSonatype</name>
-				</property>
-			</activation>
-			<build>
-				<plugins>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-source-plugin</artifactId>
-						<version>3.2.1</version>
-						<executions>
-							<execution>
-								<id>attach-sources</id>
-								<goals>
-									<goal>jar-no-fork</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-					<plugin>
-						<groupId>org.apache.maven.plugins</groupId>
-						<artifactId>maven-javadoc-plugin</artifactId>
-						<version>3.4.0</version>
-						<executions>
-							<execution>
-								<id>attach-javadoc</id>
-								<goals>
-									<goal>jar</goal>
-								</goals>
-							</execution>
-						</executions>
-					</plugin>
-				</plugins>
-			</build>
-		</profile>
-	</profiles>
+	<build>
+		<plugins>
+		</plugins>
+	</build>
 </project>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,6 +5,7 @@ import ca.uhn.fhir.batch2.jobs.imprt.BulkImportJobParameters;
 import ca.uhn.fhir.batch2.jobs.imprt.BulkDataImportProvider;
 import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
 import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
 import ca.uhn.fhir.test.utilities.HttpClientExtension;
 import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
@@ -81,6 +82,12 @@ public class BulkImportCommandTest {
 		BulkImportCommand.setEndNowForUnitTest(true);
 	}

+	private Batch2JobStartResponse createJobStartResponse(String theId) {
+		Batch2JobStartResponse response = new Batch2JobStartResponse();
+		response.setJobId(theId);
+		return response;
+	}
+
 	@Test
 	public void testBulkImport() throws IOException {
@@ -89,7 +96,8 @@
 		writeNdJsonFileToTempDirectory(fileContents1, "file1.json");
 		writeNdJsonFileToTempDirectory(fileContents2, "file2.json");

-		when(myJobCoordinator.startInstance(any())).thenReturn("THE-JOB-ID");
+		when(myJobCoordinator.startInstance(any()))
+			.thenReturn(createJobStartResponse("THE-JOB-ID"));

 		// Start the command in a separate thread
 		new Thread(() -> App.main(new String[]{
@@ -123,7 +131,8 @@
 		writeNdJsonFileToTempDirectory(fileContents1, "file1.json.gz");
 		writeNdJsonFileToTempDirectory(fileContents2, "file2.json.gz");

-		when(myJobCoordinator.startInstance(any())).thenReturn("THE-JOB-ID");
+		when(myJobCoordinator.startInstance(any()))
+			.thenReturn(createJobStartResponse("THE-JOB-ID"));

 		// Start the command in a separate thread
 		new Thread(() -> App.main(new String[]{

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.1.0-PRE14-SNAPSHOT</version>
+		<version>6.1.0-PRE15-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.batch;
  */

 import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
-import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
 import org.springframework.batch.core.configuration.annotation.StepScope;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -30,12 +29,6 @@ import org.springframework.context.annotation.Configuration;
 public class CommonBatchJobConfig {
 	public static final int MINUTES_IN_FUTURE_TO_PROCESS_FROM = 1;

-	@Bean
-	@StepScope
-	public PidToIBaseResourceProcessor pidToResourceProcessor() {
-		return new PidToIBaseResourceProcessor();
-	}
-
 	@Bean
 	@StepScope
 	public GoldenResourceAnnotatingProcessor goldenResourceAnnotatingProcessor() {

View File

@ -1,78 +0,0 @@
package ca.uhn.fhir.jpa.batch.processor;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* Reusable Item Processor which converts ResourcePersistentIds to their IBaseResources
*/
public class PidToIBaseResourceProcessor implements ItemProcessor<List<ResourcePersistentId>, List<IBaseResource>> {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
@Autowired
private SearchBuilderFactory mySearchBuilderFactory;
@Autowired
private DaoRegistry myDaoRegistry;
@Value("#{stepExecutionContext['resourceType']}")
private String myResourceType;
@Autowired
private FhirContext myContext;
@Override
public List<IBaseResource> process(List<ResourcePersistentId> theResourcePersistentId) {
String collect = theResourcePersistentId.stream().map(pid -> pid.getId().toString()).collect(Collectors.joining(","));
ourLog.trace("Processing PIDs: {}" + collect);
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(myResourceType);
Class<? extends IBaseResource> resourceTypeClass = myContext.getResourceDefinition(myResourceType).getImplementingClass();
ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, myResourceType, resourceTypeClass);
List<IBaseResource> outgoing = new ArrayList<>();
sb.loadResourcesByPid(theResourcePersistentId, Collections.emptyList(), outgoing, false, null);
ourLog.trace("Loaded resources: {}", outgoing.stream().filter(t -> t != null).map(t -> t.getIdElement().getValue()).collect(Collectors.joining(", ")));
return outgoing;
}
}

View File

@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.batch2;
 import ca.uhn.fhir.batch2.api.IJobPersistence;
 import ca.uhn.fhir.batch2.api.JobOperationResultJson;
 import ca.uhn.fhir.batch2.coordinator.BatchWorkChunk;
+import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest;
 import ca.uhn.fhir.batch2.model.JobInstance;
 import ca.uhn.fhir.batch2.model.MarkWorkChunkAsErrorRequest;
 import ca.uhn.fhir.batch2.model.StatusEnum;
@@ -31,10 +32,14 @@ import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
 import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
 import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
 import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
+import ca.uhn.fhir.jpa.util.JobInstanceUtil;
 import ca.uhn.fhir.model.api.PagingIterator;
 import org.apache.commons.collections4.ListUtils;
 import org.apache.commons.lang3.Validate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
 import org.springframework.data.domain.Sort;

 import javax.annotation.Nonnull;
@@ -53,6 +58,7 @@ import static org.apache.commons.lang3.StringUtils.isBlank;
 @Transactional
 public class JpaJobPersistenceImpl implements IJobPersistence {

+	private static final Logger ourLog = LoggerFactory.getLogger(JpaJobPersistenceImpl.class);
 	private final IBatch2JobInstanceRepository myJobInstanceRepository;
 	private final IBatch2WorkChunkRepository myWorkChunkRepository;
@@ -78,6 +84,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
 		entity.setInstanceId(theBatchWorkChunk.instanceId);
 		entity.setSerializedData(theBatchWorkChunk.serializedData);
 		entity.setCreateTime(new Date());
+		entity.setStartTime(new Date());
 		entity.setStatus(StatusEnum.QUEUED);
 		myWorkChunkRepository.save(entity);
 		return entity.getId();
@@ -102,6 +109,7 @@
 		entity.setParams(theInstance.getParameters());
 		entity.setCurrentGatedStepId(theInstance.getCurrentGatedStepId());
 		entity.setCreateTime(new Date());
+		entity.setStartTime(new Date());
 		entity.setReport(theInstance.getReport());

 		entity = myJobInstanceRepository.save(entity);
@@ -135,6 +143,23 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
 		return myJobInstanceRepository.findById(theInstanceId).map(t -> toInstance(t));
 	}

+	@Override
+	public List<JobInstance> fetchInstances(FetchJobInstancesRequest theRequest, int theStart, int theBatchSize) {
+		String definitionId = theRequest.getJobDefinition();
+		String params = theRequest.getParameters();
+
+		Pageable pageable = Pageable.ofSize(theBatchSize).withPage(theStart);
+
+		// TODO - consider adding a new index... on the JobDefinitionId (and possibly Status)
+		List<JobInstance> instances = myJobInstanceRepository.findInstancesByJobIdAndParams(
+			definitionId,
+			params,
+			pageable
+		);
+
+		return instances == null ? new ArrayList<>() : instances;
+	}
+
 	@Override
 	public List<JobInstance> fetchInstances(int thePageSize, int thePageIndex) {
 		// default sort is myCreateTime Asc
@@ -149,49 +174,11 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
 	}

 	private WorkChunk toChunk(Batch2WorkChunkEntity theEntity, boolean theIncludeData) {
-		WorkChunk retVal = new WorkChunk();
-		retVal.setId(theEntity.getId());
-		retVal.setSequence(theEntity.getSequence());
-		retVal.setJobDefinitionId(theEntity.getJobDefinitionId());
-		retVal.setJobDefinitionVersion(theEntity.getJobDefinitionVersion());
-		retVal.setInstanceId(theEntity.getInstanceId());
-		retVal.setTargetStepId(theEntity.getTargetStepId());
-		retVal.setStatus(theEntity.getStatus());
-		retVal.setCreateTime(theEntity.getCreateTime());
-		retVal.setStartTime(theEntity.getStartTime());
-		retVal.setEndTime(theEntity.getEndTime());
-		retVal.setErrorMessage(theEntity.getErrorMessage());
-		retVal.setErrorCount(theEntity.getErrorCount());
-		retVal.setRecordsProcessed(theEntity.getRecordsProcessed());
-		if (theIncludeData) {
-			if (theEntity.getSerializedData() != null) {
-				retVal.setData(theEntity.getSerializedData());
-			}
-		}
-		return retVal;
+		return JobInstanceUtil.fromEntityToWorkChunk(theEntity, theIncludeData);
 	}

 	private JobInstance toInstance(Batch2JobInstanceEntity theEntity) {
-		JobInstance retVal = JobInstance.fromInstanceId(theEntity.getId());
-		retVal.setJobDefinitionId(theEntity.getDefinitionId());
-		retVal.setJobDefinitionVersion(theEntity.getDefinitionVersion());
-		retVal.setStatus(theEntity.getStatus());
-		retVal.setCancelled(theEntity.isCancelled());
-		retVal.setStartTime(theEntity.getStartTime());
-		retVal.setCreateTime(theEntity.getCreateTime());
-		retVal.setEndTime(theEntity.getEndTime());
-		retVal.setCombinedRecordsProcessed(theEntity.getCombinedRecordsProcessed());
-		retVal.setCombinedRecordsProcessedPerSecond(theEntity.getCombinedRecordsProcessedPerSecond());
-		retVal.setTotalElapsedMillis(theEntity.getTotalElapsedMillis());
-		retVal.setWorkChunksPurged(theEntity.getWorkChunksPurged());
-		retVal.setProgress(theEntity.getProgress());
-		retVal.setErrorMessage(theEntity.getErrorMessage());
-		retVal.setErrorCount(theEntity.getErrorCount());
-		retVal.setEstimatedTimeRemaining(theEntity.getEstimatedTimeRemaining());
-		retVal.setParameters(theEntity.getParams());
-		retVal.setCurrentGatedStepId(theEntity.getCurrentGatedStepId());
-		retVal.setReport(theEntity.getReport());
-		return retVal;
+		return JobInstanceUtil.fromEntityToInstance(theEntity);
 	}

 	@Override
@@ -209,7 +196,14 @@
 	@Override
 	public void markWorkChunkAsFailed(String theChunkId, String theErrorMessage) {
-		myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), theErrorMessage, StatusEnum.FAILED);
+		String errorMessage;
+		if (theErrorMessage.length() > Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH) {
+			ourLog.warn("Truncating error message that is too long to store in database: {}", theErrorMessage);
+			errorMessage = theErrorMessage.substring(0, Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH);
+		} else {
+			errorMessage = theErrorMessage;
+		}
+		myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), errorMessage, StatusEnum.FAILED);
 	}

 	@Override
@@ -244,11 +238,24 @@
 		}
 	}

+	private void fetchChunksForStep(String theInstanceId, String theStepId, int thePageSize, int thePageIndex, Consumer<WorkChunk> theConsumer) {
+		List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunksForStep(PageRequest.of(thePageIndex, thePageSize), theInstanceId, theStepId);
+		for (Batch2WorkChunkEntity chunk : chunks) {
+			theConsumer.accept(toChunk(chunk, true));
+		}
+	}
+
 	@Override
 	public Iterator<WorkChunk> fetchAllWorkChunksIterator(String theInstanceId, boolean theWithData) {
 		return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> fetchChunks(theInstanceId, theWithData, theBatchSize, thePageIndex, theConsumer));
 	}

+	@Override
+	public Iterator<WorkChunk> fetchAllWorkChunksForStepIterator(String theInstanceId, String theStepId) {
+		return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> fetchChunksForStep(theInstanceId, theStepId, theBatchSize, thePageIndex, theConsumer));
+	}
+
 	/**
 	 * Update the stored instance
 	 *

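One detail worth calling out from the JpaJobPersistenceImpl diff above: markWorkChunkAsFailed now truncates the error message to Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH before persisting it, so an oversized message no longer breaks the database write. Below is a standalone sketch of that trimming behaviour only; the class name, method name and the 500-character limit used here are hypothetical and chosen purely for illustration.

// Standalone, hypothetical sketch of the truncation behaviour added above.
final class ErrorMessageTruncator {

	// Example limit for illustration; the real value is Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH.
	static final int MAX_LENGTH = 500;

	static String truncate(String theErrorMessage) {
		if (theErrorMessage == null || theErrorMessage.length() <= MAX_LENGTH) {
			return theErrorMessage;
		}
		// Keep the leading portion, which usually carries the exception type and message.
		return theErrorMessage.substring(0, MAX_LENGTH);
	}
}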
View File

@ -1,111 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.UrlUtil;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.Date;
import java.util.Map;
import java.util.Optional;
public abstract class BaseJpaBulkItemReader extends BaseBulkItemReader {
@Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
protected String myJobUUID;
@Autowired
protected DaoRegistry myDaoRegistry;
@Autowired
protected SearchBuilderFactory mySearchBuilderFactory;
@Autowired
private IBulkExportJobDao myBulkExportJobDao;
private ISearchBuilder mySearchBuilder;
private BulkExportJobEntity myJobEntity;
private RuntimeSearchParam myPatientSearchParam;
/**
* Get and cache an ISearchBuilder for the given resource type this partition is responsible for.
*/
protected ISearchBuilder getSearchBuilderForLocalResourceType() {
if (mySearchBuilder == null) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(myResourceType);
RuntimeResourceDefinition def = myContext.getResourceDefinition(myResourceType);
Class<? extends IBaseResource> nextTypeClass = def.getImplementingClass();
mySearchBuilder = mySearchBuilderFactory.newSearchBuilder(dao, myResourceType, nextTypeClass);
}
return mySearchBuilder;
}
@Override
protected String[] getTypeFilterList() {
BulkExportJobEntity jobEntity = getJobEntity();
Map<String, String[]> requestUrl = UrlUtil.parseQueryStrings(jobEntity.getRequest());
return requestUrl.get(JpaConstants.PARAM_EXPORT_TYPE_FILTER);
}
@Override
protected Date getSinceDate() {
return getJobEntity().getSince();
}
@Override
protected String getLogInfoForRead() {
return "Bulk export starting generation for batch export job: " + getJobEntity() + " with resourceType " + myResourceType + " and UUID " + myJobUUID;
}
protected BulkExportJobEntity getJobEntity() {
if (myJobEntity == null) {
Optional<BulkExportJobEntity> jobOpt = myBulkExportJobDao.findByJobId(myJobUUID);
if (jobOpt.isPresent()) {
myJobEntity = jobOpt.get();
} else {
String errorMessage = String.format("Job with UUID %s does not exist!", myJobUUID);
throw new IllegalStateException(Msg.code(795) + errorMessage);
}
}
return myJobEntity;
}
protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType() {
if (myPatientSearchParam == null) {
Optional<RuntimeSearchParam> onlyPatientSearchParamForResourceType = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, myResourceType);
if (onlyPatientSearchParamForResourceType.isPresent()) {
myPatientSearchParam = onlyPatientSearchParamForResourceType.get();
}
}
return myPatientSearchParam;
}
}

View File

@ -1,50 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Will run before and after a job to set the status to whatever is appropriate.
*/
public class BulkExportCreateEntityStepListener implements StepExecutionListener {
@Autowired
private BulkExportDaoSvc myBulkExportDaoSvc;
@Override
public void beforeStep(StepExecution theStepExecution) {
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
if (jobUuid != null) {
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING);
}
}
@Override
public ExitStatus afterStep(StepExecution theStepExecution) {
return ExitStatus.EXECUTING;
}
}

View File

@ -1,63 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This class sets the job status to ERROR if any failures occur while actually
* generating the export files.
*/
public class BulkExportGenerateResourceFilesStepListener implements StepExecutionListener {
@Autowired
private BulkExportDaoSvc myBulkExportDaoSvc;
@Override
public void beforeStep(@Nonnull StepExecution stepExecution) {
// nothing
}
@Override
public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
}
assert isNotBlank(jobUuid);
String exitDescription = theStepExecution.getExitStatus().getExitDescription();
myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription);
}
return theStepExecution.getExitStatus();
}
}

View File

@ -1,53 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
/**
* Will run before and after a job to set the status to whatever is appropriate.
*/
public class BulkExportJobCloser implements Tasklet {
@Value("#{jobExecutionContext['jobUUID']}")
private String myJobUUID;
@Autowired
private BulkExportDaoSvc myBulkExportDaoSvc;
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) {
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE);
} else {
myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR);
}
return RepeatStatus.FINISHED;
}
}

View File

@ -20,31 +20,9 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.processor.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processor.PidToIBaseResourceProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import java.util.ArrayList;
import java.util.List;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
@ -54,220 +32,10 @@ import java.util.List;
public class BulkExportJobConfig {
public static final int CHUNK_SIZE = 100;
@Autowired
private FhirContext myFhirContext;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
private PidToIBaseResourceProcessor myPidToIBaseResourceProcessor;
@Autowired
private GoldenResourceAnnotatingProcessor myGoldenResourceAnnotatingProcessor;
@Bean
public BulkExportDaoSvc bulkExportDaoSvc() {
return new BulkExportDaoSvc();
}
@Bean
@Lazy
@JobScope
public MdmExpansionCacheSvc mdmExpansionCacheSvc() {
return new MdmExpansionCacheSvc();
}
@Bean
@Lazy
public Job bulkExportJob() {
return myJobBuilderFactory.get(BatchConstants.BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(bulkExportPartitionStep())
.next(closeJobStep())
.build();
}
@Bean
@Lazy
@StepScope
public CompositeItemProcessor<List<ResourcePersistentId>, List<IBaseResource>> inflateResourceThenAnnotateWithGoldenResourceProcessor() {
CompositeItemProcessor processor = new CompositeItemProcessor<>();
ArrayList<ItemProcessor> delegates = new ArrayList<>();
delegates.add(myPidToIBaseResourceProcessor);
delegates.add(myGoldenResourceAnnotatingProcessor);
processor.setDelegates(delegates);
return processor;
}
@Bean
@Lazy
public Job groupBulkExportJob() {
return myJobBuilderFactory.get(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
.validator(groupBulkJobParameterValidator())
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(groupPartitionStep())
.next(closeJobStep())
.build();
}
@Bean
@Lazy
public Job patientBulkExportJob() {
return myJobBuilderFactory.get(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(patientPartitionStep())
.next(closeJobStep())
.build();
}
@Bean
public GroupIdPresentValidator groupBulkJobParameterValidator() {
return new GroupIdPresentValidator();
}
@Bean
public Step createBulkExportEntityStep() {
return myStepBuilderFactory.get("createBulkExportEntityStep")
.tasklet(createBulkExportEntityTasklet())
.listener(bulkExportCreateEntityStepListener())
.build();
}
@Bean
public CreateBulkExportEntityTasklet createBulkExportEntityTasklet() {
return new CreateBulkExportEntityTasklet();
}
@Bean
public JobParametersValidator bulkExportJobParameterValidator() {
return new BulkExportJobParameterValidator();
}
//Writers
@Bean
public Step groupBulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(groupBulkItemReader())
.processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
.writer(resourceToFileWriter())
.listener(bulkExportGenerateResourceFilesStepListener())
.build();
}
@Bean
public Step bulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP)
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(bulkItemReader())
.processor(myPidToIBaseResourceProcessor)
.writer(resourceToFileWriter())
.listener(bulkExportGenerateResourceFilesStepListener())
.build();
}
@Bean
public Step patientBulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(patientBulkItemReader())
.processor(myPidToIBaseResourceProcessor)
.writer(resourceToFileWriter())
.listener(bulkExportGenerateResourceFilesStepListener())
.build();
}
@Bean
@JobScope
public BulkExportJobCloser bulkExportJobCloser() {
return new BulkExportJobCloser();
}
@Bean
public Step closeJobStep() {
return myStepBuilderFactory.get("closeJobStep")
.tasklet(bulkExportJobCloser())
.build();
}
@Bean
@JobScope
public BulkExportCreateEntityStepListener bulkExportCreateEntityStepListener() {
return new BulkExportCreateEntityStepListener();
}
@Bean
@JobScope
public BulkExportGenerateResourceFilesStepListener bulkExportGenerateResourceFilesStepListener() {
return new BulkExportGenerateResourceFilesStepListener();
}
@Bean
public Step bulkExportPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
.partitioner(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
.step(bulkExportGenerateResourceFilesStep())
.build();
}
@Bean
public Step groupPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
.partitioner("groupBulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
.step(groupBulkExportGenerateResourceFilesStep())
.build();
}
@Bean
public Step patientPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
.partitioner("patientBulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
.step(patientBulkExportGenerateResourceFilesStep())
.build();
}
@Bean
@StepScope
public GroupBulkItemReader groupBulkItemReader() {
return new GroupBulkItemReader();
}
@Bean
@StepScope
public PatientBulkItemReader patientBulkItemReader() {
return new PatientBulkItemReader();
}
@Bean
@StepScope
public BulkItemReader bulkItemReader() {
return new BulkItemReader();
}
@Bean
@JobScope
public ResourceTypePartitioner bulkExportResourceTypePartitioner() {
return new ResourceTypePartitioner();
}
@Bean
@StepScope
public ResourceToFileWriter resourceToFileWriter() {
return new ResourceToFileWriter(myFhirContext, myDaoRegistry);
}
}

View File

@ -1,95 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.Arrays;
import java.util.Optional;
/**
* This class will prevent a job from running if the UUID does not exist or is invalid.
*/
public class BulkExportJobParameterValidator implements JobParametersValidator {
@Autowired
private IBulkExportJobDao myBulkExportJobDao;
@Autowired
private PlatformTransactionManager myTransactionManager;
@Override
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
if (theJobParameters == null) {
throw new JobParametersInvalidException(Msg.code(793) + "This job needs Parameters: [readChunkSize], [jobUUID], [filters], [outputFormat], [resourceTypes]");
}
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder();
Long readChunkSize = theJobParameters.getLong(BatchConstants.READ_CHUNK_PARAMETER);
if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
}
String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");
}
boolean hasExistingJob = oJob.isPresent();
//Check for to-be-created parameters.
if (!hasExistingJob) {
String resourceTypes = theJobParameters.getString(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER);
if (StringUtils.isBlank(resourceTypes)) {
errorBuilder.append("You must include [").append(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER).append("] as a Job Parameter");
} else {
String[] resourceArray = resourceTypes.split(",");
Arrays.stream(resourceArray).filter(resourceType -> resourceType.equalsIgnoreCase("Binary"))
.findFirst()
.ifPresent(resourceType -> errorBuilder.append("Bulk export of Binary resources is forbidden"));
}
String outputFormat = theJobParameters.getString("outputFormat");
if (!StringUtils.isBlank(outputFormat) && !Constants.CT_FHIR_NDJSON.equals(outputFormat)) {
errorBuilder.append("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON);
}
}
return errorBuilder.toString();
});
if (!StringUtils.isEmpty(errorMessage)) {
throw new JobParametersInvalidException(Msg.code(794) + errorMessage);
}
}
}

View File

@ -1,62 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Basic Bulk Export implementation which simply reads all type filters and applies them, along with the _since param
* on a given resource type.
*/
public class BulkItemReader extends BaseJpaBulkItemReader {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
@Override
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID);
Set<ResourcePersistentId> myReadPids = new HashSet<>();
List<SearchParameterMap> map = createSearchParameterMapsForResourceType();
ISearchBuilder sb = getSearchBuilderForLocalResourceType();
for (SearchParameterMap spMap: map) {
ourLog.debug("About to evaluate query {}", spMap.toNormalizedQueryString(myContext));
IResultIterator myResultIterator = sb.createQuery(spMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions());
while (myResultIterator.hasNext()) {
myReadPids.add(myResultIterator.next());
}
}
return myReadPids.iterator();
}
}

View File

@ -1,332 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES;
/**
* Bulk Item reader for the Group Bulk Export job.
* Instead of performing a normal query on the resource type using type filters, we instead
*
* 1. Get the group ID defined for this job
* 2. Expand its membership so we get references to all patients in the group
* 3. Optionally further expand that into all MDM-matched Patients (including golden resources)
* 4. Then perform normal bulk export, filtered so that only results that refer to members are returned.
*/
public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemReader<List<ResourcePersistentId>> {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
public static final int QUERY_CHUNK_SIZE = 100;
@Value("#{jobParameters['" + BatchConstants.GROUP_ID_PARAMETER + "']}")
private String myGroupId;
@Value("#{jobParameters['" + BatchConstants.EXPAND_MDM_PARAMETER+ "'] ?: false}")
private boolean myMdmEnabled;
@Autowired
private IIdHelperService myIdHelperService;
@Autowired
private IMdmLinkDao myMdmLinkDao;
@Autowired
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
@Autowired
private IJpaIdHelperService myJpaIdHelperService;
@Override
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
//Short circuit out if we detect we are attempting to export groups.
if (myResourceType.equalsIgnoreCase("Group")) {
return getSingletonGroupIterator();
}
//Short circuit out if we detect we are attempting to extract patients
if (myResourceType.equalsIgnoreCase("Patient")) {
return getExpandedPatientIterator();
}
//First, let's expand the group so we get a list of all patient IDs of the group, and MDM-matched patient IDs of the group.
Set<String> expandedMemberResourceIds = expandAllPatientPidsFromGroup();
if (ourLog.isDebugEnabled()) {
ourLog.debug("Group/{} has been expanded to members:[{}]", myGroupId, String.join(",", expandedMemberResourceIds));
}
//Next, let's search for the target resources, with their correct patient references, chunked.
//The results will be jammed into myExpandedMemberPids
Set<ResourcePersistentId> myExpandedMemberPids = new HashSet<>();
QueryChunker<String> queryChunker = new QueryChunker<>();
queryChunker.chunk(new ArrayList<>(expandedMemberResourceIds), QUERY_CHUNK_SIZE, (idChunk) -> {
queryResourceTypeWithReferencesToPatients(myExpandedMemberPids, idChunk);
});
if (ourLog.isDebugEnabled()) {
ourLog.debug("Resource PIDs to be Bulk Exported: {}", myExpandedMemberPids);
}
return myExpandedMemberPids.iterator();
}
/**
 * If the resource type being exported is Group itself, we intentionally do not expand membership or follow members into other groups.
 * This short-circuits the standard "expand the group into its members, then search the patient compartment" logic and simply returns the requested Group.
*
* @return An iterator containing a single Group ID.
*/
private Iterator<ResourcePersistentId> getSingletonGroupIterator() {
String stringedId = new IdDt(myGroupId).getIdPart();
ResourcePersistentId groupId = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), myResourceType, stringedId);
return Collections.singletonList(groupId).iterator();
}
/**
 * If we are doing a Group Bulk Export and the `Patient` resourceType is requested, we can just return the group members,
 * possibly expanded by MDM, without having to fetch any other resource DAOs.
*/
private Iterator<ResourcePersistentId> getExpandedPatientIterator() {
List<String> members = getMembers();
List<IIdType> ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList());
List<Long> pidsOrThrowException = myJpaIdHelperService.getPidsOrThrowException(ids);
Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);
if (myMdmEnabled) {
SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd);
Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);
List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
goldenPidSourcePidTuple.forEach(tuple -> {
patientPidsToExport.add(tuple.getGoldenPid());
patientPidsToExport.add(tuple.getSourcePid());
});
populateMdmResourceCache(goldenPidSourcePidTuple);
}
List<ResourcePersistentId> resourcePersistentIds = patientPidsToExport
.stream()
.map(ResourcePersistentId::new)
.collect(Collectors.toList());
return resourcePersistentIds.iterator();
}
/**
 * Populate the MDM expansion cache with a source-resource-id to golden-resource-id lookup built from the given link tuples.
 *
 * @param thePidTuples the golden-pid/source-pid tuples returned by the MDM link expansion
 */
private void populateMdmResourceCache(List<IMdmLinkDao.MdmPidTuple> thePidTuples) {
if (myMdmExpansionCacheSvc.hasBeenPopulated()) {
return;
}
//First, convert this zipped set of tuples to a map of
//{
// patient/gold-1 -> [patient/1, patient/2]
// patient/gold-2 -> [patient/3, patient/4]
//}
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
extract(thePidTuples, goldenResourceToSourcePidMap);
//Next, let's convert it to an inverted index for fast lookup
// {
// patient/1 -> patient/gold-1
// patient/2 -> patient/gold-1
// patient/3 -> patient/gold-2
// patient/4 -> patient/gold-2
// }
Map<String, String> sourceResourceIdToGoldenResourceIdMap = new HashMap<>();
goldenResourceToSourcePidMap.forEach((key, value) -> {
String goldenResourceId = myIdHelperService.translatePidIdToForcedIdWithCache(new ResourcePersistentId(key)).orElse(key.toString());
Map<Long, Optional<String>> pidsToForcedIds = myIdHelperService.translatePidsToForcedIds(value);
Set<String> sourceResourceIds = pidsToForcedIds.entrySet().stream()
.map(ent -> ent.getValue().isPresent() ? ent.getValue().get() : ent.getKey().toString())
.collect(Collectors.toSet());
sourceResourceIds
.forEach(sourceResourceId -> sourceResourceIdToGoldenResourceIdMap.put(sourceResourceId, goldenResourceId));
});
//Now that we have built our cached expansion, store it.
myMdmExpansionCacheSvc.setCacheContents(sourceResourceIdToGoldenResourceIdMap);
}
/**
* Given the local myGroupId, read this group, and find all members' patient references.
 * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"])
*/
private List<String> getMembers() {
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
List<IPrimitiveType> evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class);
return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
}
/**
 * Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients.
 * If myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to expand the membership into MDM-matched
 * patients.
*
* @return a Set of Strings representing the resource IDs of all members of a group.
*/
private Set<String> expandAllPatientPidsFromGroup() {
Set<String> expandedIds = new HashSet<>();
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);
//Attempt to perform MDM Expansion of membership
if (myMdmEnabled) {
List<IMdmLinkDao.MdmPidTuple> goldenPidTargetPidTuples = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
//Now let's translate these pids into resource IDs
Set<Long> uniquePids = new HashSet<>();
goldenPidTargetPidTuples.forEach(tuple -> {
uniquePids.add(tuple.getGoldenPid());
uniquePids.add(tuple.getSourcePid());
});
Map<Long, Optional<String>> pidToForcedIdMap = myIdHelperService.translatePidsToForcedIds(uniquePids);
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
extract(goldenPidTargetPidTuples, goldenResourceToSourcePidMap);
populateMdmResourceCache(goldenPidTargetPidTuples);
//If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID.
Set<String> resolvedResourceIds = pidToForcedIdMap.entrySet().stream()
.map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString())
.collect(Collectors.toSet());
expandedIds.addAll(resolvedResourceIds);
}
//Now manually add the members of the group (it's possible, even with MDM expansion, that some members don't have MDM matches,
//so they would otherwise be skipped).
expandedIds.addAll(getMembers());
return expandedIds;
}
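	// Illustration of the forced-id fallback above (made-up values): if PID 123 has a client-assigned
	// ("forced") id of "patient-abc", translatePidsToForcedIds() returns Optional.of("patient-abc") and
	// "patient-abc" is added to expandedIds; if PID 456 has no forced id, the Optional is empty and the
	// PID itself is used, so "456" is added instead.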
private void extract(List<IMdmLinkDao.MdmPidTuple> theGoldenPidTargetPidTuples, Map<Long, Set<Long>> theGoldenResourceToSourcePidMap) {
for (IMdmLinkDao.MdmPidTuple goldenPidTargetPidTuple : theGoldenPidTargetPidTuples) {
Long goldenPid = goldenPidTargetPidTuple.getGoldenPid();
Long sourcePid = goldenPidTargetPidTuple.getSourcePid();
theGoldenResourceToSourcePidMap.computeIfAbsent(goldenPid, key -> new HashSet<>()).add(sourcePid);
}
}
private void queryResourceTypeWithReferencesToPatients(Set<ResourcePersistentId> myReadPids, List<String> idChunk) {
//Build SP map
//First, inject the _typeFilters and _since from the export job
List<SearchParameterMap> expandedSpMaps = createSearchParameterMapsForResourceType();
for (SearchParameterMap expandedSpMap: expandedSpMaps) {
//Since we are in a bulk job, we have to ensure the user didn't jam in a patient search param, since we need to manually set that.
validateSearchParameters(expandedSpMap);
// Fetch and cache a search builder for this resource type
ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType();
// Now, further filter the query with patient references defined by the chunk of IDs we have.
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(myResourceType)) {
filterSearchByHasParam(idChunk, expandedSpMap);
} else {
filterSearchByResourceIds(idChunk, expandedSpMap);
}
//Execute the query and add all found pids to our local set.
IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions());
while (resultIterator.hasNext()) {
myReadPids.add(resultIterator.next());
}
}
}
	/**
	 * Restrict the given search to resources that are referenced by the supplied chunk of patient IDs,
	 * using a reverse-chained _has parameter.
	 *
	 * @param idChunk the chunk of patient IDs to filter by
	 * @param expandedSpMap the search parameter map to add the _has clause to
	 */
private void filterSearchByHasParam(List<String> idChunk, SearchParameterMap expandedSpMap) {
HasOrListParam hasOrListParam = new HasOrListParam();
idChunk.stream().forEach(id -> hasOrListParam.addOr(buildHasParam(id)));
expandedSpMap.add("_has", hasOrListParam);
}
private HasParam buildHasParam(String theId) {
if ("Practitioner".equalsIgnoreCase(myResourceType)) {
return new HasParam("Patient", "general-practitioner", "_id", theId);
} else if ("Organization".equalsIgnoreCase(myResourceType)) {
return new HasParam("Patient", "organization", "_id", theId);
} else {
throw new IllegalArgumentException(Msg.code(2077) + " We can't handle forward references onto type " + myResourceType);
}
}
private void filterSearchByResourceIds(List<String> idChunk, SearchParameterMap expandedSpMap) {
ReferenceOrListParam orList = new ReferenceOrListParam();
idChunk.forEach(id -> orList.add(new ReferenceParam(id)));
expandedSpMap.add(getPatientSearchParamForCurrentResourceType().getName(), orList);
}
private void validateSearchParameters(SearchParameterMap expandedSpMap) {
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(myResourceType)) {
return;
} else {
RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
throw new IllegalArgumentException(Msg.code(792) + String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
}
}
}
}
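Group export therefore filters each chunk of member IDs in one of two ways: forward-reference types (Practitioner, Organization) get a reverse-chained _has parameter, while every other compartment type gets a plain reference OR-list. A minimal sketch of the two shapes with illustrative member IDs and the roughly equivalent REST queries (the reference search parameter name is resolved per resource type at runtime; "patient" is just an example):

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;

class GroupFilterSketch {
	// Practitioner/Organization: keep resources that some member Patient points at,
	// roughly GET [base]/Practitioner?_has:Patient:general-practitioner:_id=P1,P2
	static void filterByHas(SearchParameterMap theMap) {
		HasOrListParam has = new HasOrListParam();
		has.addOr(new HasParam("Patient", "general-practitioner", "_id", "P1"));
		has.addOr(new HasParam("Patient", "general-practitioner", "_id", "P2"));
		theMap.add("_has", has);
	}

	// Everything else: keep resources whose patient reference is one of the members,
	// roughly GET [base]/Observation?patient=Patient/P1,Patient/P2
	static void filterByReference(SearchParameterMap theMap) {
		ReferenceOrListParam members = new ReferenceOrListParam();
		members.add(new ReferenceParam("Patient/P1"));
		members.add(new ReferenceParam("Patient/P2"));
		theMap.add("patient", members);
	}
}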

View File

@@ -1,99 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.ReferenceParam;
import org.slf4j.Logger;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Bulk Item reader for the Patient Bulk Export job.
* Instead of performing a normal query on the resource type using type filters, we instead
*
 * 1. Determine the resource type
* 2. Search for anything that has `patient-compartment-search-param:missing=false`
*/
public class PatientBulkItemReader extends BaseJpaBulkItemReader implements ItemReader<List<ResourcePersistentId>> {
@Autowired
private DaoConfig myDaoConfig;
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
private RuntimeSearchParam validateSearchParameters(SearchParameterMap expandedSpMap) {
RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType();
if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
throw new IllegalArgumentException(Msg.code(796) + String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
}
return runtimeSearchParam;
}
@Override
protected Iterator<ResourcePersistentId> getResourcePidIterator() {
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
ourLog.error(errorMessage);
throw new IllegalStateException(Msg.code(797) + errorMessage);
}
List<ResourcePersistentId> myReadPids = new ArrayList<>();
//use _typeFilter and _since and all those fancy bits and bobs to generate our basic SP map.
List<SearchParameterMap> maps = createSearchParameterMapsForResourceType();
String patientSearchParam = getPatientSearchParamForCurrentResourceType().getName();
for (SearchParameterMap map: maps) {
//Ensure users did not monkey with the patient compartment search parameter.
validateSearchParameters(map);
//Skip adding the parameter querying for patient= if we are in fact querying the patient resource type.
if (!myResourceType.equalsIgnoreCase("Patient")) {
map.add(patientSearchParam, new ReferenceParam().setMissing(false));
}
ourLog.debug("About to execute query {}", map.toNormalizedQueryString(myContext));
ISearchBuilder sb = getSearchBuilderForLocalResourceType();
IResultIterator myResultIterator = sb.createQuery(map, new SearchRuntimeDetails(null, myJobUUID), null, RequestPartitionId.allPartitions());
while (myResultIterator.hasNext()) {
myReadPids.add(myResultIterator.next());
}
}
return myReadPids.iterator();
}
}
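The :missing=false trick above is why Patient Bulk Export refuses to run when Index Missing Fields is disabled: the query can only be answered from the missing-fields index. A minimal sketch of the filter added for a non-Patient resource type, with the roughly equivalent REST query (the parameter name is resolved per resource type; "patient" is illustrative):

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;

class PatientExportFilterSketch {
	// Roughly GET [base]/Observation?patient:missing=false - i.e. only Observations that actually
	// have a patient reference, which scopes the export to the Patient compartment.
	static void addCompartmentFilter(SearchParameterMap theMap) {
		theMap.add("patient", new ReferenceParam().setMissing(false));
	}
}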

View File

@@ -1,69 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.PostConstruct;
import java.util.Optional;
public class ResourceToFileWriter extends BaseResourceToFileWriter {
@Autowired
private BulkExportDaoSvc myBulkExportDaoSvc;
public ResourceToFileWriter(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
super(theFhirContext, theDaoRegistry);
}
@PostConstruct
public void start() {
myBinaryDao = getBinaryDao();
}
@Override
protected Optional<IIdType> flushToFiles() {
if (myOutputStream.size() > 0) {
IIdType createdId = createBinaryFromOutputStream();
BulkExportCollectionFileEntity file = new BulkExportCollectionFileEntity();
file.setResource(createdId.getIdPart());
myBulkExportDaoSvc.addFileToCollectionWithId(myBulkExportCollectionEntityId, file);
myOutputStream.reset();
return Optional.of(createdId);
}
return Optional.empty();
}
@SuppressWarnings("unchecked")
private IFhirResourceDao<IBaseBinary> getBinaryDao() {
return myDaoRegistry.getResourceDao("Binary");
}
}

View File

@@ -1,76 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.HashMap;
import java.util.Map;
import static org.slf4j.LoggerFactory.getLogger;
public class ResourceTypePartitioner implements Partitioner {
private static final Logger ourLog = getLogger(ResourceTypePartitioner.class);
@Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER+ "']}")
private String myJobUUID;
@Autowired
private BulkExportDaoSvc myBulkExportDaoSvc;
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> partitionContextMap = new HashMap<>();
Map<Long, String> idToResourceType = myBulkExportDaoSvc.getBulkJobCollectionIdToResourceTypeMap(myJobUUID);
idToResourceType.entrySet().stream()
.forEach(entry -> {
String resourceType = entry.getValue();
Long collectionEntityId = entry.getKey();
ourLog.debug("Creating a partition step for CollectionEntity: [{}] processing resource type [{}]", collectionEntityId, resourceType);
ExecutionContext context = new ExecutionContext();
//The worker step needs to know what resource type it is looking for.
context.putString(BatchConstants.JOB_EXECUTION_RESOURCE_TYPE, resourceType);
// The worker step needs to know which parent job it is processing for, and which collection entity it will be
// attaching its results to.
context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putLong(BatchConstants.JOB_COLLECTION_ENTITY_ID, collectionEntityId);
// Name the partition based on the resource type
partitionContextMap.put(resourceType, context);
});
return partitionContextMap;
}
}
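Each entry in the returned map becomes one partitioned worker step execution in Spring Batch, keyed by resource type. A hedged usage sketch (collection IDs and context key names are illustrative; they come from the BulkExportDaoSvc lookup and the BatchConstants used above):

import org.springframework.batch.item.ExecutionContext;
import java.util.Map;

class PartitionSketch {
	static void show(ResourceTypePartitioner thePartitioner) {
		// gridSize is ignored here; one partition is produced per collection/resource type.
		Map<String, ExecutionContext> partitions = thePartitioner.partition(2);
		// e.g. "Patient"     -> {resourceType=Patient,     jobUUID=..., bulkExportCollectionEntityId=101}
		//      "Observation" -> {resourceType=Observation, jobUUID=..., bulkExportCollectionEntityId=102}
		partitions.forEach((type, ctx) -> System.out.println(type + " -> " + ctx));
	}
}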

View File

@@ -67,8 +67,6 @@ import static org.slf4j.LoggerFactory.getLogger;
public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJobSchedulingHelper { public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJobSchedulingHelper {
private static final Logger ourLog = getLogger(BulkDataExportJobSchedulingHelperImpl.class); private static final Logger ourLog = getLogger(BulkDataExportJobSchedulingHelperImpl.class);
private static final Long READ_CHUNK_SIZE = 10L;
@Autowired @Autowired
private DaoRegistry myDaoRegistry; private DaoRegistry myDaoRegistry;
@@ -88,18 +86,6 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
@Autowired @Autowired
private ISchedulerService mySchedulerService; private ISchedulerService mySchedulerService;
@Autowired
@Qualifier(BatchConstants.BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkExportJob;
@Autowired
@Qualifier(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myGroupBulkExportJob;
@Autowired
@Qualifier(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myPatientBulkExportJob;
@Autowired @Autowired
private IBulkExportJobDao myBulkExportJobDao; private IBulkExportJobDao myBulkExportJobDao;
@@ -109,70 +95,20 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
@Autowired @Autowired
private FhirContext myContext; private FhirContext myContext;
@Autowired
private BulkExportHelperService myBulkExportHelperSvc;
@PostConstruct @PostConstruct
public void start() { public void start() {
myTxTemplate = new TransactionTemplate(myTxManager); myTxTemplate = new TransactionTemplate(myTxManager);
// job to cleanup unneeded BulkExportJobEntities that are persisted, but unwanted
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(Job.class.getName());
jobDetail.setJobClass(Job.class);
mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail);
jobDetail = new ScheduledJobDefinition();
jobDetail.setId(PurgeExpiredFilesJob.class.getName()); jobDetail.setId(PurgeExpiredFilesJob.class.getName());
jobDetail.setJobClass(PurgeExpiredFilesJob.class); jobDetail.setJobClass(PurgeExpiredFilesJob.class);
mySchedulerService.scheduleClusteredJob(DateUtils.MILLIS_PER_HOUR, jobDetail); mySchedulerService.scheduleClusteredJob(DateUtils.MILLIS_PER_HOUR, jobDetail);
} }
/**
* This method is called by the scheduler to run a pass of the
* generator
*/
@Transactional(value = Transactional.TxType.NEVER)
@Override
public synchronized void startSubmittedJobs() {
if (!myDaoConfig.isEnableTaskBulkExportJobExecution()) {
return;
}
Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
Pageable page = PageRequest.of(0, 1);
Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED);
if (submittedJobs.isEmpty()) {
return Optional.empty();
}
return Optional.of(submittedJobs.getContent().get(0));
});
if (!jobToProcessOpt.isPresent()) {
return;
}
BulkExportJobEntity bulkExportJobEntity = jobToProcessOpt.get();
String jobUuid = bulkExportJobEntity.getJobId();
try {
processJob(bulkExportJobEntity);
} catch (Exception e) {
ourLog.error("Failure while preparing bulk export extract", e);
myTxTemplate.execute(t -> {
Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
if (submittedJobs.isPresent()) {
BulkExportJobEntity jobEntity = submittedJobs.get();
jobEntity.setStatus(BulkExportJobStatusEnum.ERROR);
jobEntity.setStatusMessage(e.getMessage());
myBulkExportJobDao.save(jobEntity);
}
return null;
});
}
}
@Override @Override
@Transactional(Transactional.TxType.NEVER) @Transactional(Transactional.TxType.NEVER)
public synchronized void cancelAndPurgeAllJobs() { public synchronized void cancelAndPurgeAllJobs() {
@@ -208,18 +144,17 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
}); });
if (jobToDelete.isPresent()) { if (jobToDelete.isPresent()) {
ourLog.info("Deleting bulk export job: {}", jobToDelete.get()); ourLog.info("Deleting bulk export job: {}", jobToDelete.get());
myTxTemplate.execute(t -> { myTxTemplate.execute(t -> {
BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId()); BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId());
for (BulkExportCollectionEntity nextCollection : job.getCollections()) { for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) { for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
ourLog.info("Purging bulk data file: {}", nextFile.getResourceId()); ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
getBinaryDao().delete(toId(nextFile.getResourceId()), new SystemRequestDetails()); IIdType id = myBulkExportHelperSvc.toId(nextFile.getResourceId());
getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails()); getBinaryDao().delete(id, new SystemRequestDetails());
getBinaryDao().forceExpungeInExistingTransaction(id, new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
myBulkExportCollectionFileDao.deleteByPid(nextFile.getId()); myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());
} }
@@ -241,71 +176,6 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
return myDaoRegistry.getResourceDao("Binary"); return myDaoRegistry.getResourceDao("Binary");
} }
private IIdType toId(String theResourceId) {
IIdType retVal = myContext.getVersion().newIdType();
retVal.setValue(theResourceId);
return retVal;
}
private void processJob(BulkExportJobEntity theBulkExportJobEntity) {
String theJobUuid = theBulkExportJobEntity.getJobId();
JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
.addLong(BatchConstants.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);
try {
if (isGroupBulkJob(theBulkExportJobEntity)) {
enhanceBulkParametersWithGroupParameters(theBulkExportJobEntity, parameters);
myJobSubmitter.runJob(myGroupBulkExportJob, parameters.toJobParameters());
} else if (isPatientBulkJob(theBulkExportJobEntity)) {
myJobSubmitter.runJob(myPatientBulkExportJob, parameters.toJobParameters());
} else {
myJobSubmitter.runJob(myBulkExportJob, parameters.toJobParameters());
}
} catch (JobParametersInvalidException theE) {
ourLog.error("Unable to start job with UUID: {}, the parameters are invalid. {}", theJobUuid, theE.getMessage());
}
}
private String getQueryParameterIfPresent(String theRequestString, String theParameter) {
Map<String, String[]> stringMap = UrlUtil.parseQueryString(theRequestString);
if (stringMap != null) {
String[] strings = stringMap.get(theParameter);
if (strings != null) {
return String.join(",", strings);
}
}
return null;
}
private boolean isPatientBulkJob(BulkExportJobEntity theBulkExportJobEntity) {
return theBulkExportJobEntity.getRequest().startsWith("/Patient/");
}
private boolean isGroupBulkJob(BulkExportJobEntity theBulkExportJobEntity) {
return theBulkExportJobEntity.getRequest().startsWith("/Group/");
}
private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) {
String theGroupId = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID);
String expandMdm = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_MDM);
theParameters.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);
theParameters.addString(BatchConstants.EXPAND_MDM_PARAMETER, expandMdm);
}
public static class Job implements HapiJob {
@Autowired
private IBulkDataExportJobSchedulingHelper myTarget;
@Override
public void execute(JobExecutionContext theContext) {
myTarget.startSubmittedJobs();
}
}
public static class PurgeExpiredFilesJob implements HapiJob { public static class PurgeExpiredFilesJob implements HapiJob {
@Autowired @Autowired
private IBulkDataExportJobSchedulingHelper myTarget; private IBulkDataExportJobSchedulingHelper myTarget;

View File

@@ -1,306 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.svc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.SearchParameterUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import javax.transaction.Transactional;
import java.util.Date;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions.ExportStyle.GROUP;
import static ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions.ExportStyle.PATIENT;
import static ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImpl.class);
private final int myReuseBulkExportForMillis = (int) (60 * DateUtils.MILLIS_PER_MINUTE);
@Autowired
private IBulkExportJobDao myBulkExportJobDao;
@Autowired
private IBulkExportCollectionDao myBulkExportCollectionDao;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
private Set<String> myCompartmentResources;
@Transactional
@Override
@Deprecated
public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions) {
return submitJob(theBulkDataExportOptions, true, null);
}
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Transactional
@Override
public JobInfo submitJob(BulkDataExportOptions theBulkDataExportOptions, Boolean useCache, RequestDetails theRequestDetails) {
String outputFormat = Constants.CT_FHIR_NDJSON;
if (isNotBlank(theBulkDataExportOptions.getOutputFormat())) {
outputFormat = theBulkDataExportOptions.getOutputFormat();
}
if (!Constants.CTS_NDJSON.contains(outputFormat)) {
throw new InvalidRequestException(Msg.code(786) + "Invalid output format: " + theBulkDataExportOptions.getOutputFormat());
}
// Interceptor call: STORAGE_INITIATE_BULK_EXPORT
HookParams params = new HookParams()
.add(BulkDataExportOptions.class, theBulkDataExportOptions)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_INITIATE_BULK_EXPORT, params);
// TODO GGG KS can we encode BulkDataExportOptions as a JSON string as opposed to this request string. Feels like it would be a more extensible encoding...
//Probably yes, but this will all need to be rebuilt when we remove this bridge entity
StringBuilder requestBuilder = new StringBuilder();
requestBuilder.append("/");
//Prefix the export url with Group/[id]/ or /Patient/ depending on what type of request it is.
if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
requestBuilder.append(theBulkDataExportOptions.getGroupId().toVersionless()).append("/");
} else if (theBulkDataExportOptions.getExportStyle().equals(PATIENT)) {
requestBuilder.append("Patient/");
}
requestBuilder.append(JpaConstants.OPERATION_EXPORT);
requestBuilder.append("?").append(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT).append("=").append(escapeUrlParam(outputFormat));
Set<String> resourceTypes = theBulkDataExportOptions.getResourceTypes();
if (resourceTypes != null) {
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE).append("=").append(String.join(",", escapeUrlParams(resourceTypes)));
}
Date since = theBulkDataExportOptions.getSince();
if (since != null) {
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_SINCE).append("=").append(new InstantType(since).setTimeZoneZulu(true).getValueAsString());
}
if (theBulkDataExportOptions.getFilters() != null && theBulkDataExportOptions.getFilters().size() > 0) {
theBulkDataExportOptions.getFilters().stream()
.forEach(filter -> requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_TYPE_FILTER).append("=").append(escapeUrlParam(filter)));
}
if (theBulkDataExportOptions.getExportStyle().equals(GROUP)) {
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(theBulkDataExportOptions.getGroupId().getValue());
requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(theBulkDataExportOptions.isExpandMdm());
}
String request = requestBuilder.toString();
//If we are using the cache, then attempt to retrieve a matching job based on the Request String, otherwise just make a new one.
if (useCache) {
Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
Pageable page = PageRequest.of(0, 10);
Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR);
if (!existing.isEmpty()) {
return toSubmittedJobInfo(existing.iterator().next());
}
}
if (resourceTypes != null && resourceTypes.contains("Binary")) {
String msg = myContext.getLocalizer().getMessage(BulkDataExportSvcImpl.class, "onlyBinarySelected");
throw new InvalidRequestException(Msg.code(787) + msg);
}
if (resourceTypes == null || resourceTypes.isEmpty()) {
// This is probably not a useful default, but having the default be "download the whole
// server" seems like a risky default too. We'll deal with that by having the default involve
// only returning a small time span
resourceTypes = getAllowedResourceTypesForBulkExportStyle(theBulkDataExportOptions.getExportStyle());
if (since == null) {
since = DateUtils.addDays(new Date(), -1);
}
}
resourceTypes =
resourceTypes
.stream()
.filter(t -> !"Binary".equals(t))
.collect(Collectors.toSet());
BulkExportJobEntity jobEntity = new BulkExportJobEntity();
jobEntity.setJobId(UUID.randomUUID().toString());
jobEntity.setStatus(BulkExportJobStatusEnum.SUBMITTED);
jobEntity.setSince(since);
jobEntity.setCreated(new Date());
jobEntity.setRequest(request);
// Validate types
validateTypes(resourceTypes);
validateTypeFilters(theBulkDataExportOptions.getFilters(), resourceTypes);
updateExpiry(jobEntity);
myBulkExportJobDao.save(jobEntity);
for (String nextType : resourceTypes) {
BulkExportCollectionEntity collection = new BulkExportCollectionEntity();
collection.setJob(jobEntity);
collection.setResourceType(nextType);
jobEntity.getCollections().add(collection);
myBulkExportCollectionDao.save(collection);
}
ourLog.info("Bulk export job submitted: {}", jobEntity.toString());
return toSubmittedJobInfo(jobEntity);
}
public void validateTypes(Set<String> theResourceTypes) {
for (String nextType : theResourceTypes) {
if (!myDaoRegistry.isResourceTypeSupported(nextType)) {
String msg = myContext.getLocalizer().getMessage(BulkDataExportSvcImpl.class, "unknownResourceType", nextType);
throw new InvalidRequestException(Msg.code(788) + msg);
}
}
}
public void validateTypeFilters(Set<String> theFilters, Set<String> theResourceTypes) {
if (theFilters != null) {
for (String next : theFilters) {
if (!next.contains("?")) {
throw new InvalidRequestException(Msg.code(789) + "Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Must be in the form [ResourceType]?[params]");
}
String resourceType = next.substring(0, next.indexOf("?"));
if (!theResourceTypes.contains(resourceType)) {
throw new InvalidRequestException(Msg.code(790) + "Invalid " + JpaConstants.PARAM_EXPORT_TYPE_FILTER + " value \"" + next + "\". Resource type does not appear in " + JpaConstants.PARAM_EXPORT_TYPE+ " list");
}
}
}
}
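	// For example, with _type=Patient,Observation the filter "Patient?gender=female" passes both checks
	// above, while "gender=female" (no "?") fails the first and "Practitioner?active=true" fails the
	// second because Practitioner is not in the requested _type list.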
private JobInfo toSubmittedJobInfo(BulkExportJobEntity theJob) {
return new JobInfo()
.setJobId(theJob.getJobId())
.setStatus(theJob.getStatus());
}
/**
 * If the configured file retention period is <= 0, clear the job's expiry (so it never gets purged); otherwise set
 * the expiry to now plus the retention period.
*
* @param theJob the job to update the expiry for.
*/
private void updateExpiry(BulkExportJobEntity theJob) {
if (myDaoConfig.getBulkExportFileRetentionPeriodHours() > 0) {
theJob.setExpiry(DateUtils.addHours(new Date(), myDaoConfig.getBulkExportFileRetentionPeriodHours()));
} else {
theJob.setExpiry(null);
}
}
@Transactional
@Override
public JobInfo getJobInfoOrThrowResourceNotFound(String theJobId) {
BulkExportJobEntity job = myBulkExportJobDao
.findByJobId(theJobId)
.orElseThrow(() -> new ResourceNotFoundException(theJobId));
JobInfo retVal = new JobInfo();
retVal.setJobId(theJobId);
retVal.setStatus(job.getStatus());
retVal.setStatusTime(job.getStatusTime());
retVal.setStatusMessage(job.getStatusMessage());
retVal.setRequest(job.getRequest());
if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) {
for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
retVal.addFile()
.setResourceType(nextCollection.getResourceType())
.setResourceId(toQualifiedBinaryId(nextFile.getResourceId()));
}
}
}
return retVal;
}
@Override
public Set<String> getPatientCompartmentResources() {
if (myCompartmentResources == null) {
myCompartmentResources = myContext.getResourceTypes().stream()
.filter(resType -> SearchParameterUtil.isResourceTypeInPatientCompartment(myContext, resType))
.collect(Collectors.toSet());
}
return myCompartmentResources;
}
public Set<String> getAllowedResourceTypesForBulkExportStyle(BulkDataExportOptions.ExportStyle theExportStyle) {
if (theExportStyle.equals(SYSTEM)) {
return myContext.getResourceTypes();
} else if (theExportStyle.equals(GROUP) || theExportStyle.equals(PATIENT)) {
return getPatientCompartmentResources();
} else {
throw new IllegalArgumentException(Msg.code(791) + String.format("HAPI FHIR does not recognize a Bulk Export request of type: %s", theExportStyle));
}
}
private IIdType toQualifiedBinaryId(String theIdPart) {
IIdType retVal = myContext.getVersion().newIdType();
retVal.setParts(null, "Binary", theIdPart, null);
return retVal;
}
}
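The request string assembled in submitJob() doubles as the cache key passed to findExistingJob(), so two submissions with identical options inside the reuse window resolve to the same job. For a group export with MDM expansion it comes out roughly as follows (illustrative IDs; the output format and _typeFilter values are escaped by escapeUrlParam):

String request = "/Group/123/$export?_outputFormat=application%2Ffhir%2Bndjson"
	+ "&_type=Patient,Observation&_typeFilter=Observation%3Fstatus%3Dfinal"
	+ "&_groupId=Group/123&_mdm=true";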

View File

@@ -1,105 +0,0 @@
package ca.uhn.fhir.jpa.bulk.export.svc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import static org.slf4j.LoggerFactory.getLogger;
@Service
public class BulkExportDaoSvc {
private static final Logger ourLog = getLogger(BulkExportDaoSvc.class);
@Autowired
IBulkExportJobDao myBulkExportJobDao;
@Autowired
IBulkExportCollectionDao myBulkExportCollectionDao;
@Autowired
IBulkExportCollectionFileDao myBulkExportCollectionFileDao;
@Transactional
public void addFileToCollectionWithId(Long theCollectionEntityId, BulkExportCollectionFileEntity theFile) {
Optional<BulkExportCollectionEntity> byId = myBulkExportCollectionDao.findById(theCollectionEntityId);
if (byId.isPresent()) {
BulkExportCollectionEntity exportCollectionEntity = byId.get();
theFile.setCollection(exportCollectionEntity);
exportCollectionEntity.getFiles().add(theFile);
myBulkExportCollectionFileDao.saveAndFlush(theFile);
myBulkExportCollectionDao.saveAndFlush(exportCollectionEntity);
}
}
@Transactional
public Map<Long, String> getBulkJobCollectionIdToResourceTypeMap(String theJobUUID) {
BulkExportJobEntity bulkExportJobEntity = loadJob(theJobUUID);
Collection<BulkExportCollectionEntity> collections = bulkExportJobEntity.getCollections();
return collections.stream()
.collect(Collectors.toMap(
BulkExportCollectionEntity::getId,
BulkExportCollectionEntity::getResourceType
));
}
private BulkExportJobEntity loadJob(String theJobUUID) {
Optional<BulkExportJobEntity> jobOpt = myBulkExportJobDao.findByJobId(theJobUUID);
if (!jobOpt.isPresent()) {
ourLog.warn("Job with UUID {} appears to be deleted", theJobUUID);
return null;
}
return jobOpt.get();
}
@Transactional
public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) {
setJobToStatus(theJobUUID, theStatus, null);
}
@Transactional
public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) {
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(theJobUUID);
if (!oJob.isPresent()) {
ourLog.error("Job with UUID {} doesn't exist!", theJobUUID);
return;
}
ourLog.info("Setting job with UUID {} to {}", theJobUUID, theStatus);
BulkExportJobEntity bulkExportJobEntity = oJob.get();
bulkExportJobEntity.setStatus(theStatus);
bulkExportJobEntity.setStatusMessage(theStatusMessage);
myBulkExportJobDao.save(bulkExportJobEntity);
}
}

View File

@@ -0,0 +1,526 @@
package ca.uhn.fhir.jpa.bulk.export.svc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.HasOrListParam;
import ca.uhn.fhir.rest.param.HasParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.util.ExtensionUtil;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.SearchParameterUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class JpaBulkExportProcessor implements IBulkExportProcessor {
private static final Logger ourLog = LoggerFactory.getLogger(JpaBulkExportProcessor.class);
public static final int QUERY_CHUNK_SIZE = 100;
public static final List<String> PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES = List.of("Practitioner", "Organization");
@Autowired
private FhirContext myContext;
@Autowired
private BulkExportHelperService myBulkExportHelperSvc;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
protected SearchBuilderFactory mySearchBuilderFactory;
@Autowired
private IIdHelperService myIdHelperService;
@Autowired
private IMdmLinkDao myMdmLinkDao;
@Autowired
private IJpaIdHelperService myJpaIdHelperService;
@Autowired
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
private final HashMap<String, ISearchBuilder> myResourceTypeToSearchBuilder = new HashMap<>();
private final HashMap<String, String> myResourceTypeToFhirPath = new HashMap<>();
private IFhirPath myFhirPath;
@Transactional
@Override
public Iterator<ResourcePersistentId> getResourcePidIterator(ExportPIDIteratorParameters theParams) {
String resourceType = theParams.getResourceType();
String jobId = theParams.getJobId();
RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType);
Set<ResourcePersistentId> pids = new HashSet<>();
if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.PATIENT) {
// Patient
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
ourLog.error(errorMessage);
throw new IllegalStateException(Msg.code(797) + errorMessage);
}
List<SearchParameterMap> maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParams);
String patientSearchParam = getPatientSearchParamForCurrentResourceType(theParams.getResourceType()).getName();
for (SearchParameterMap map : maps) {
//Ensure users did not monkey with the patient compartment search parameter.
validateSearchParametersForPatient(map, theParams);
ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType(theParams);
if (!resourceType.equalsIgnoreCase("Patient")) {
map.add(patientSearchParam, new ReferenceParam().setMissing(false));
}
IResultIterator resultIterator = searchBuilder.createQuery(map,
new SearchRuntimeDetails(null, jobId),
null,
RequestPartitionId.allPartitions());
while (resultIterator.hasNext()) {
pids.add(resultIterator.next());
}
}
}
else if (theParams.getExportStyle() == BulkDataExportOptions.ExportStyle.GROUP) {
// Group
if (resourceType.equalsIgnoreCase("Patient")) {
return getExpandedPatientIterator(theParams);
}
Set<String> expandedMemberResourceIds = expandAllPatientPidsFromGroup(theParams);
if (ourLog.isDebugEnabled()) {
ourLog.debug("Group/{} has been expanded to members:[{}]", theParams, String.join(",", expandedMemberResourceIds));
}
//Next, let's search for the target resources, with their correct patient references, chunked.
//The results will be jammed into pids
QueryChunker<String> queryChunker = new QueryChunker<>();
queryChunker.chunk(new ArrayList<>(expandedMemberResourceIds), QUERY_CHUNK_SIZE, (idChunk) -> {
queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, def);
});
}
else {
// System
List<SearchParameterMap> maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(def, theParams);
ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType(theParams);
for (SearchParameterMap map : maps) {
// requires a transaction
IResultIterator resultIterator = searchBuilder.createQuery(map,
new SearchRuntimeDetails(null, jobId),
null,
RequestPartitionId.allPartitions());
while (resultIterator.hasNext()) {
pids.add(resultIterator.next());
}
}
}
return pids.iterator();
}
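	// Illustrative driver for the method above (a sketch only; the ExportPIDIteratorParameters accessors
	// are assumed here and are not part of this diff):
	//   ExportPIDIteratorParameters params = new ExportPIDIteratorParameters();
	//   params.setResourceType("Observation");                             // assumed setter
	//   params.setJobId(theJobId);                                         // assumed setter
	//   params.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);   // assumed setter
	//   Iterator<ResourcePersistentId> pids = processor.getResourcePidIterator(params);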
/**
* Get and cache an ISearchBuilder for the given resource type this partition is responsible for.
*/
protected ISearchBuilder getSearchBuilderForLocalResourceType(ExportPIDIteratorParameters theParams) {
String resourceType = theParams.getResourceType();
if (!myResourceTypeToSearchBuilder.containsKey(resourceType)) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceType);
RuntimeResourceDefinition def = myContext.getResourceDefinition(resourceType);
Class<? extends IBaseResource> nextTypeClass = def.getImplementingClass();
ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceType, nextTypeClass);
myResourceTypeToSearchBuilder.put(resourceType, sb);
}
return myResourceTypeToSearchBuilder.get(resourceType);
}
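	// Design note: the builder is cached per resource type so repeated calls for the same type reuse one
	// ISearchBuilder. The same cache could be written with computeIfAbsent, e.g. (sketch):
	//   return myResourceTypeToSearchBuilder.computeIfAbsent(resourceType, type -> {
	//     IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(type);
	//     RuntimeResourceDefinition def = myContext.getResourceDefinition(type);
	//     return mySearchBuilderFactory.newSearchBuilder(dao, type, def.getImplementingClass());
	//   });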
protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType(String theResourceType) {
RuntimeSearchParam searchParam = null;
Optional<RuntimeSearchParam> onlyPatientSearchParamForResourceType = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResourceType);
if (onlyPatientSearchParamForResourceType.isPresent()) {
searchParam = onlyPatientSearchParamForResourceType.get();
}
return searchParam;
}
@Override
public void expandMdmResources(List<IBaseResource> theResources) {
for (IBaseResource resource : theResources) {
if (!PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(resource.fhirType())) {
annotateBackwardsReferences(resource);
}
}
// is this necessary?
myResourceTypeToFhirPath.clear();
}
/**
 * Validate the SearchParameterMap for a patient-style export: the patient compartment search parameter may not appear in the _typeFilter.
 **/
private RuntimeSearchParam validateSearchParametersForPatient(SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) {
RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(theParams.getResourceType());
if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
throw new IllegalArgumentException(Msg.code(796) + String.format("Patient Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
}
return runtimeSearchParam;
}
/**
 * Validate the SearchParameterMap for a group-style export: for compartment resource types, the patient search parameter may not appear in the _typeFilter.
 **/
private void validateSearchParametersForGroup(SearchParameterMap expandedSpMap, String theResourceType) {
// we only validate for certain types
if (!PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(theResourceType)) {
RuntimeSearchParam runtimeSearchParam = getPatientSearchParamForCurrentResourceType(theResourceType);
if (expandedSpMap.get(runtimeSearchParam.getName()) != null) {
throw new IllegalArgumentException(Msg.code(792) + String.format("Group Bulk Export manually modifies the Search Parameter called [%s], so you may not include this search parameter in your _typeFilter!", runtimeSearchParam.getName()));
}
}
}
/**
 * If we are doing a Group Bulk Export and the `Patient` resourceType is requested, we can just return the group members,
 * possibly expanded by MDM, without having to fetch any other resource DAOs.
*/
private Iterator<ResourcePersistentId> getExpandedPatientIterator(ExportPIDIteratorParameters theParameters) {
List<String> members = getMembers(theParameters.getGroupId());
List<IIdType> ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList());
List<Long> pidsOrThrowException = myJpaIdHelperService.getPidsOrThrowException(ids);
Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);
if (theParameters.isExpandMdm()) {
SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParameters.getGroupId()), srd);
Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);
List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
goldenPidSourcePidTuple.forEach(tuple -> {
patientPidsToExport.add(tuple.getGoldenPid());
patientPidsToExport.add(tuple.getSourcePid());
});
populateMdmResourceCache(goldenPidSourcePidTuple);
}
List<ResourcePersistentId> resourcePersistentIds = patientPidsToExport
.stream()
.map(ResourcePersistentId::new)
.collect(Collectors.toList());
return resourcePersistentIds.iterator();
}
/**
* Given the local myGroupId, read this group, and find all members' patient references.
*
 * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"])
*/
private List<String> getMembers(String theGroupId) {
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theGroupId), requestDetails);
List<IPrimitiveType> evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class);
return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
}
/**
* Populates the MDM expansion cache from the given golden/source PID tuples. No-op if the cache is already populated.
*
* @param thePidTuples the golden-resource/source-resource PID pairs returned by the MDM expansion
*/
private void populateMdmResourceCache(List<IMdmLinkDao.MdmPidTuple> thePidTuples) {
if (myMdmExpansionCacheSvc.hasBeenPopulated()) {
return;
}
//First, convert this zipped set of tuples to a map of
//{
// patient/gold-1 -> [patient/1, patient/2]
// patient/gold-2 -> [patient/3, patient/4]
//}
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
extract(thePidTuples, goldenResourceToSourcePidMap);
//Next, let's convert it to an inverted index for fast lookup
// {
// patient/1 -> patient/gold-1
// patient/2 -> patient/gold-1
// patient/3 -> patient/gold-2
// patient/4 -> patient/gold-2
// }
Map<String, String> sourceResourceIdToGoldenResourceIdMap = new HashMap<>();
goldenResourceToSourcePidMap.forEach((key, value) -> {
String goldenResourceId = myIdHelperService.translatePidIdToForcedIdWithCache(new ResourcePersistentId(key)).orElse(key.toString());
Map<Long, Optional<String>> pidsToForcedIds = myIdHelperService.translatePidsToForcedIds(value);
Set<String> sourceResourceIds = pidsToForcedIds.entrySet().stream()
.map(ent -> ent.getValue().isPresent() ? ent.getValue().get() : ent.getKey().toString())
.collect(Collectors.toSet());
sourceResourceIds
.forEach(sourceResourceId -> sourceResourceIdToGoldenResourceIdMap.put(sourceResourceId, goldenResourceId));
});
//Now that we have built our cached expansion, store it.
myMdmExpansionCacheSvc.setCacheContents(sourceResourceIdToGoldenResourceIdMap);
}
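// Sketch of the resulting cache behaviour (using the hypothetical IDs from the comment above):
// once setCacheContents(...) has run, myMdmExpansionCacheSvc.getGoldenResourceId("patient/1")
// returns "patient/gold-1", which is what addGoldenResourceExtension() uses later to annotate exported resources.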
private void extract(List<IMdmLinkDao.MdmPidTuple> theGoldenPidTargetPidTuples, Map<Long, Set<Long>> theGoldenResourceToSourcePidMap) {
for (IMdmLinkDao.MdmPidTuple goldenPidTargetPidTuple : theGoldenPidTargetPidTuples) {
Long goldenPid = goldenPidTargetPidTuple.getGoldenPid();
Long sourcePid = goldenPidTargetPidTuple.getSourcePid();
theGoldenResourceToSourcePidMap.computeIfAbsent(goldenPid, key -> new HashSet<>()).add(sourcePid);
}
}
private void queryResourceTypeWithReferencesToPatients(Set<ResourcePersistentId> myReadPids,
List<String> idChunk,
ExportPIDIteratorParameters theParams,
RuntimeResourceDefinition theDef) {
//Build SP map
//First, inject the _typeFilters and _since from the export job
List<SearchParameterMap> expandedSpMaps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams);
for (SearchParameterMap expandedSpMap : expandedSpMaps) {
//Since we are in a bulk job, ensure the user didn't include the patient search param in the _typeFilter, as we set that parameter ourselves below.
validateSearchParametersForGroup(expandedSpMap, theParams.getResourceType());
// Fetch and cache a search builder for this resource type
ISearchBuilder searchBuilder = getSearchBuilderForLocalResourceType(theParams);
// Now, further filter the query with patient references defined by the chunk of IDs we have.
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(theParams.getResourceType())) {
filterSearchByHasParam(idChunk, expandedSpMap, theParams);
} else {
filterSearchByResourceIds(idChunk, expandedSpMap, theParams);
}
//Execute the query and add all found pids to our local set.
IResultIterator resultIterator = searchBuilder.createQuery(expandedSpMap,
new SearchRuntimeDetails(null, theParams.getJobId()),
null,
RequestPartitionId.allPartitions());
while (resultIterator.hasNext()) {
myReadPids.add(resultIterator.next());
}
}
}
/**
* Must not be called for resource types listed in PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.
*
* @param idChunk       the chunk of patient resource IDs to filter by
* @param expandedSpMap the search parameter map to add the reference filter to
* @param theParams     the export parameters for the current resource type
*/
private void filterSearchByResourceIds(List<String> idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) {
ReferenceOrListParam orList = new ReferenceOrListParam();
idChunk.forEach(id -> orList.add(new ReferenceParam(id)));
expandedSpMap.add(getPatientSearchParamForCurrentResourceType(theParams.getResourceType()).getName(), orList);
}
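// Sketch (hypothetical chunk): for idChunk = ["Patient/1", "Patient/2"] and a resource type whose
// patient search parameter is, say, "patient", the map above is roughly equivalent to the query
//   Observation?patient=Patient/1,Patient/2
// i.e. an OR list of references to the patients in the current chunk.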
/**
* Filters the search by adding a _has parameter for each patient ID in the chunk. Used for resource types
* that Patient references directly (see PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES).
*
* @param idChunk       the chunk of patient resource IDs to filter by
* @param expandedSpMap the search parameter map to add the _has filter to
*/
private void filterSearchByHasParam(List<String> idChunk, SearchParameterMap expandedSpMap, ExportPIDIteratorParameters theParams) {
HasOrListParam hasOrListParam = new HasOrListParam();
idChunk.stream().forEach(id -> hasOrListParam.addOr(buildHasParam(id, theParams.getResourceType())));
expandedSpMap.add("_has", hasOrListParam);
}
private HasParam buildHasParam(String theId, String theResourceType) {
if ("Practitioner".equalsIgnoreCase(theResourceType)) {
return new HasParam("Patient", "general-practitioner", "_id", theId);
} else if ("Organization".equalsIgnoreCase(theResourceType)) {
return new HasParam("Patient", "organization", "_id", theId);
} else {
throw new IllegalArgumentException(Msg.code(2077) + " We can't handle forward references onto type " + theResourceType);
}
}
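// Sketch (hypothetical ID): buildHasParam("123", "Practitioner") produces a reverse-chaining parameter
// roughly equivalent to
//   Practitioner?_has:Patient:general-practitioner:_id=123
// i.e. "Practitioners referenced via Patient.generalPractitioner by a Patient whose id is 123";
// the Organization branch behaves the same way through Patient.managingOrganization.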
/**
* Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients.
* if myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to attempt to also expand it into matched
* patients.
*
* @return a Set of Strings representing the resource IDs of all members of a group.
*/
private Set<String> expandAllPatientPidsFromGroup(ExportPIDIteratorParameters theParams) {
Set<String> expandedIds = new HashSet<>();
SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(theParams.getGroupId()), requestDetails);
Long pidOrNull = myJpaIdHelperService.getPidOrNull(group);
//Attempt to perform MDM Expansion of membership
if (theParams.isExpandMdm()) {
List<IMdmLinkDao.MdmPidTuple> goldenPidTargetPidTuples = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
//Now let's translate these pids into resource IDs
Set<Long> uniquePids = new HashSet<>();
goldenPidTargetPidTuples.forEach(tuple -> {
uniquePids.add(tuple.getGoldenPid());
uniquePids.add(tuple.getSourcePid());
});
Map<Long, Optional<String>> pidToForcedIdMap = myIdHelperService.translatePidsToForcedIds(uniquePids);
Map<Long, Set<Long>> goldenResourceToSourcePidMap = new HashMap<>();
extract(goldenPidTargetPidTuples, goldenResourceToSourcePidMap);
populateMdmResourceCache(goldenPidTargetPidTuples);
//If the result of the translation is an empty optional, it means there is no forced id, and we can use the PID as the resource ID.
Set<String> resolvedResourceIds = pidToForcedIdMap.entrySet().stream()
.map(entry -> entry.getValue().isPresent() ? entry.getValue().get() : entry.getKey().toString())
.collect(Collectors.toSet());
expandedIds.addAll(resolvedResourceIds);
}
//Now manually add the members of the group (it's possible, even with MDM expansion, that some members don't have MDM matches
//and would otherwise be skipped)
expandedIds.addAll(getMembers(theParams.getGroupId()));
return expandedIds;
}
/* Mdm Expansion */
private RuntimeSearchParam getRuntimeSearchParam(IBaseResource theResource) {
Optional<RuntimeSearchParam> oPatientSearchParam = SearchParameterUtil.getOnlyPatientSearchParamForResourceType(myContext, theResource.fhirType());
if (!oPatientSearchParam.isPresent()) {
String errorMessage = String.format("[%s] has no search parameters that are for patients, so it is invalid for Group Bulk Export!", theResource.fhirType());
throw new IllegalArgumentException(Msg.code(2103) + errorMessage);
} else {
return oPatientSearchParam.get();
}
}
private void annotateBackwardsReferences(IBaseResource iBaseResource) {
Optional<String> patientReference = getPatientReference(iBaseResource);
if (patientReference.isPresent()) {
addGoldenResourceExtension(iBaseResource, patientReference.get());
} else {
ourLog.error("Failed to find the patient reference information for resource {}. This is a bug, " +
"as all resources which can be exported via Group Bulk Export must reference a patient.", iBaseResource);
}
}
private Optional<String> getPatientReference(IBaseResource iBaseResource) {
String fhirPath;
String resourceType = iBaseResource.fhirType();
if (myResourceTypeToFhirPath.containsKey(resourceType)) {
fhirPath = myResourceTypeToFhirPath.get(resourceType);
} else {
RuntimeSearchParam runtimeSearchParam = getRuntimeSearchParam(iBaseResource);
fhirPath = getPatientFhirPath(runtimeSearchParam);
myResourceTypeToFhirPath.put(resourceType, fhirPath);
}
if (iBaseResource.fhirType().equalsIgnoreCase("Patient")) {
return Optional.of(iBaseResource.getIdElement().getIdPart());
} else {
Optional<IBaseReference> optionalReference = getFhirParser().evaluateFirst(iBaseResource, fhirPath, IBaseReference.class);
if (optionalReference.isPresent()) {
return optionalReference.map(theIBaseReference -> theIBaseReference.getReferenceElement().getIdPart());
} else {
return Optional.empty();
}
}
}
private void addGoldenResourceExtension(IBaseResource iBaseResource, String sourceResourceId) {
String goldenResourceId = myMdmExpansionCacheSvc.getGoldenResourceId(sourceResourceId);
IBaseExtension<?, ?> extension = ExtensionUtil.getOrCreateExtension(iBaseResource, HapiExtensions.ASSOCIATED_GOLDEN_RESOURCE_EXTENSION_URL);
if (!StringUtils.isBlank(goldenResourceId)) {
ExtensionUtil.setExtension(myContext, extension, "reference", prefixPatient(goldenResourceId));
}
}
private String prefixPatient(String theResourceId) {
return "Patient/" + theResourceId;
}
private IFhirPath getFhirParser() {
if (myFhirPath == null) {
myFhirPath = myContext.newFhirPath();
}
return myFhirPath;
}
private String getPatientFhirPath(RuntimeSearchParam theRuntimeParam) {
String path = theRuntimeParam.getPath();
// GGG: Yes this is a stupid hack, but by default this runtime search param will return a path like
// Observation.subject.where(resolve() is Patient), which unfortunately our FHIRPath evaluator doesn't
// play nicely with, so we truncate the path at the .where() clause.
if (path.contains(".where")) {
path = path.substring(0, path.indexOf(".where"));
}
return path;
}
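// Sketch: for the example above, "Observation.subject.where(resolve() is Patient)" is truncated at the
// first ".where" and evaluated as plain "Observation.subject", which the FHIRPath evaluator can handle.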
}

View File

@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/ */
import ca.uhn.fhir.jpa.batch.config.BatchConstants; import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil; import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.ChunkContext;
@ -38,8 +37,16 @@ public class CreateBulkImportEntityTasklet implements Tasklet {
//We can leave early if they provided us with an existing job. //We can leave early if they provided us with an existing job.
ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER), "Job doesn't have a UUID"); ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER)); addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED; return RepeatStatus.FINISHED;
} }
public void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()
.getJobExecution()
.getExecutionContext()
.putString(BatchConstants.JOB_UUID_PARAMETER, theJobUUID);
}
} }

View File

@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.config;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
import ca.uhn.fhir.jpa.bulk.export.svc.JpaBulkExportProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class JpaBulkExportConfig {
@Bean
public IBulkExportProcessor jpaBulkExportProcessor() {
return new JpaBulkExportProcessor();
}
@Bean
public BulkExportHelperService bulkExportHelperService() {
return new BulkExportHelperService();
}
}
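// Sketch (hypothetical consumer, not part of this commit): with this configuration imported, any Spring bean
// in the same context can have the processor injected, e.g.
//   @Autowired private IBulkExportProcessor myBulkExportProcessor;
//   ...
//   Iterator<ResourcePersistentId> pids = myBulkExportProcessor.getResourcePidIterator(theParams);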

View File

@ -16,10 +16,8 @@ import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor; import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider; import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl; import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc; import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl; import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
@ -181,7 +179,8 @@ import java.util.Date;
BatchJobsConfig.class, BatchJobsConfig.class,
SearchParamConfig.class, SearchParamConfig.class,
ValidationSupportConfig.class, ValidationSupportConfig.class,
Batch2SupportConfig.class Batch2SupportConfig.class,
JpaBulkExportConfig.class
}) })
public class JpaConfig { public class JpaConfig {
public static final String JPA_VALIDATION_SUPPORT_CHAIN = "myJpaValidationSupportChain"; public static final String JPA_VALIDATION_SUPPORT_CHAIN = "myJpaValidationSupportChain";
@ -425,12 +424,6 @@ public class JpaConfig {
return new AutowiringSpringBeanJobFactory(); return new AutowiringSpringBeanJobFactory();
} }
@Bean
@Lazy
public IBulkDataExportSvc bulkDataExportSvc() {
return new BulkDataExportSvcImpl();
}
@Bean @Bean
public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper() { public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper() {
return new BulkDataExportJobSchedulingHelperImpl(); return new BulkDataExportJobSchedulingHelperImpl();

View File

@ -20,9 +20,12 @@ package ca.uhn.fhir.jpa.dao.data;
* #L% * #L%
*/ */
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity; import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query; import org.springframework.data.jpa.repository.Query;
@ -31,6 +34,8 @@ import org.springframework.data.repository.query.Param;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.Set;
public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobInstanceEntity, String>, IHapiFhirJpaRepository { public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobInstanceEntity, String>, IHapiFhirJpaRepository {
@Modifying @Modifying
@ -45,6 +50,27 @@ public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobIns
@Query("UPDATE Batch2JobInstanceEntity e SET e.myCurrentGatedStepId = :currentGatedStepId WHERE e.myId = :id") @Query("UPDATE Batch2JobInstanceEntity e SET e.myCurrentGatedStepId = :currentGatedStepId WHERE e.myId = :id")
void updateInstanceCurrentGatedStepId(@Param("id") String theInstanceId, @Param("currentGatedStepId") String theCurrentGatedStepId); void updateInstanceCurrentGatedStepId(@Param("id") String theInstanceId, @Param("currentGatedStepId") String theCurrentGatedStepId);
@Query(
value = "SELECT * from Batch2JobInstanceEntity WHERE DEFINITION_ID = :defId AND PARAMS_JSON = :params AND STAT IN( :stats )",
nativeQuery = true
)
List<JobInstance> findInstancesByJobIdParamsAndStatus(
@Param("defId") String theDefinitionId,
@Param("params") String theParams,
@Param("stats") Set<StatusEnum> theStatus,
Pageable thePageable
);
@Query(
value = "SELECT * from Batch2JobInstanceEntity WHERE DEFINITION_ID = :defId AND PARAMS_JSON = :params",
nativeQuery = true
)
List<JobInstance> findInstancesByJobIdAndParams(
@Param("defId") String theDefinitionId,
@Param("params") String theParams,
Pageable thePageable
);
@Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId AND e.myStatus IN :statuses") @Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId AND e.myStatus IN :statuses")
List<Batch2JobInstanceEntity> fetchInstancesByJobDefinitionIdAndStatus(@Param("jobDefinitionId") String theJobDefinitionId, @Param("statuses") Set<StatusEnum> theIncompleteStatuses, Pageable thePageRequest); List<Batch2JobInstanceEntity> fetchInstancesByJobDefinitionIdAndStatus(@Param("jobDefinitionId") String theJobDefinitionId, @Param("statuses") Set<StatusEnum> theIncompleteStatuses, Pageable thePageRequest);

View File

@ -36,6 +36,9 @@ public interface IBatch2WorkChunkRepository extends JpaRepository<Batch2WorkChun
@Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC") @Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId ORDER BY e.mySequence ASC")
List<Batch2WorkChunkEntity> fetchChunks(Pageable thePageRequest, @Param("instanceId") String theInstanceId); List<Batch2WorkChunkEntity> fetchChunks(Pageable thePageRequest, @Param("instanceId") String theInstanceId);
@Query("SELECT e FROM Batch2WorkChunkEntity e WHERE e.myInstanceId = :instanceId AND e.myTargetStepId = :targetStepId ORDER BY e.mySequence ASC")
List<Batch2WorkChunkEntity> fetchChunksForStep(Pageable thePageRequest, @Param("instanceId") String theInstanceId, @Param("targetStepId") String theTargetStepId);
@Modifying @Modifying
@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myRecordsProcessed = :rp, e.mySerializedData = null WHERE e.myId = :id") @Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.myRecordsProcessed = :rp, e.mySerializedData = null WHERE e.myId = :id")
void updateChunkStatusAndClearDataForEndSuccess(@Param("id") String theChunkId, @Param("et") Date theEndTime, @Param("rp") int theRecordsProcessed, @Param("status") StatusEnum theInProgress); void updateChunkStatusAndClearDataForEndSuccess(@Param("id") String theChunkId, @Param("et") Date theEndTime, @Param("rp") int theRecordsProcessed, @Param("status") StatusEnum theInProgress);

View File

@ -26,6 +26,7 @@ import org.springframework.data.repository.query.Param;
* #L% * #L%
*/ */
@Deprecated
public interface IBulkExportCollectionDao extends JpaRepository<BulkExportCollectionEntity, Long>, IHapiFhirJpaRepository { public interface IBulkExportCollectionDao extends JpaRepository<BulkExportCollectionEntity, Long>, IHapiFhirJpaRepository {
@Modifying @Modifying

View File

@ -27,8 +27,16 @@ import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
/*
* These classes are no longer needed.
* Metadata on the job is contained in the job itself
* (no separate storage required).
*
* See the BulkExportAppCtx for job details
*/
@Entity @Entity
@Table(name = "HFJ_BLK_EXPORT_COLLECTION") @Table(name = "HFJ_BLK_EXPORT_COLLECTION")
@Deprecated
public class BulkExportCollectionEntity implements Serializable { public class BulkExportCollectionEntity implements Serializable {
@Id @Id

View File

@ -35,8 +35,17 @@ import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import java.io.Serializable; import java.io.Serializable;
/*
* These classes are no longer needed.
* Metadata on the job is contained in the job itself
* (no separate storage required).
*
* See the BulkExportAppCtx for job details
*/
@Entity @Entity
@Table(name = "HFJ_BLK_EXPORT_COLFILE") @Table(name = "HFJ_BLK_EXPORT_COLFILE")
@Deprecated
public class BulkExportCollectionFileEntity implements Serializable { public class BulkExportCollectionFileEntity implements Serializable {
@Id @Id

View File

@ -49,12 +49,21 @@ import java.util.Date;
import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.left;
/*
* These classes are no longer needed.
* Metadata on the job is contained in the job itself
* (no separate storage required).
*
* See the BulkExportAppCtx for job details
*/
@Entity @Entity
@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = { @Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
@UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID") @UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
}, indexes = { }, indexes = {
@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME") @Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
}) })
@Deprecated
public class BulkExportJobEntity implements Serializable { public class BulkExportJobEntity implements Serializable {
public static final int REQUEST_LENGTH = 1024; public static final int REQUEST_LENGTH = 1024;

View File

@ -0,0 +1,94 @@
package ca.uhn.fhir.jpa.util;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import javax.annotation.Nonnull;
public class JobInstanceUtil {
private JobInstanceUtil() {}
/**
* Converts a Batch2JobInstanceEntity into a JobInstance object
* @param theEntity - the entity
* @return - a job instance
*/
@Nonnull
public static JobInstance fromEntityToInstance(@Nonnull Batch2JobInstanceEntity theEntity) {
JobInstance retVal = new JobInstance();
retVal.setInstanceId(theEntity.getId());
retVal.setJobDefinitionId(theEntity.getDefinitionId());
retVal.setJobDefinitionVersion(theEntity.getDefinitionVersion());
retVal.setStatus(theEntity.getStatus());
retVal.setCancelled(theEntity.isCancelled());
retVal.setStartTime(theEntity.getStartTime());
retVal.setCreateTime(theEntity.getCreateTime());
retVal.setEndTime(theEntity.getEndTime());
retVal.setCombinedRecordsProcessed(theEntity.getCombinedRecordsProcessed());
retVal.setCombinedRecordsProcessedPerSecond(theEntity.getCombinedRecordsProcessedPerSecond());
retVal.setTotalElapsedMillis(theEntity.getTotalElapsedMillis());
retVal.setWorkChunksPurged(theEntity.getWorkChunksPurged());
retVal.setProgress(theEntity.getProgress());
retVal.setErrorMessage(theEntity.getErrorMessage());
retVal.setErrorCount(theEntity.getErrorCount());
retVal.setEstimatedTimeRemaining(theEntity.getEstimatedTimeRemaining());
retVal.setParameters(theEntity.getParams());
retVal.setCurrentGatedStepId(theEntity.getCurrentGatedStepId());
retVal.setReport(theEntity.getReport());
retVal.setEstimatedTimeRemaining(theEntity.getEstimatedTimeRemaining());
return retVal;
}
/**
* Converts a Batch2WorkChunkEntity into a WorkChunk object
* @param theEntity - the entity to convert
* @param theIncludeData - whether or not to include the Data attached to the chunk
* @return - the WorkChunk object
*/
@Nonnull
public static WorkChunk fromEntityToWorkChunk(@Nonnull Batch2WorkChunkEntity theEntity, boolean theIncludeData) {
WorkChunk retVal = new WorkChunk();
retVal.setId(theEntity.getId());
retVal.setSequence(theEntity.getSequence());
retVal.setJobDefinitionId(theEntity.getJobDefinitionId());
retVal.setJobDefinitionVersion(theEntity.getJobDefinitionVersion());
retVal.setInstanceId(theEntity.getInstanceId());
retVal.setTargetStepId(theEntity.getTargetStepId());
retVal.setStatus(theEntity.getStatus());
retVal.setCreateTime(theEntity.getCreateTime());
retVal.setStartTime(theEntity.getStartTime());
retVal.setEndTime(theEntity.getEndTime());
retVal.setErrorMessage(theEntity.getErrorMessage());
retVal.setErrorCount(theEntity.getErrorCount());
retVal.setRecordsProcessed(theEntity.getRecordsProcessed());
if (theIncludeData) {
if (theEntity.getSerializedData() != null) {
retVal.setData(theEntity.getSerializedData());
}
}
return retVal;
}
}

View File

@ -1,20 +1,46 @@
package ca.uhn.fhir.jpa.batch2; package ca.uhn.fhir.jpa.batch2;
import ca.uhn.fhir.batch2.api.JobOperationResultJson; import ca.uhn.fhir.batch2.api.JobOperationResultJson;
import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity; import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.util.JobInstanceUtil;
import ca.uhn.fhir.model.api.PagingIterator;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks; import org.mockito.InjectMocks;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class) @ExtendWith(MockitoExtension.class)
@ -68,4 +94,244 @@ class JpaJobPersistenceImplTest {
assertFalse(result.getSuccess()); assertFalse(result.getSuccess());
assertEquals("Job instance <test-instance-id> was already cancelled. Nothing to do.", result.getMessage()); assertEquals("Job instance <test-instance-id> was already cancelled. Nothing to do.", result.getMessage());
} }
@Test
public void markInstanceAsCompleted_withInstanceId_updatesToCompleted() {
// setup
String jobId = "jobid";
// test
mySvc.markInstanceAsCompleted(jobId);
// verify
ArgumentCaptor<StatusEnum> statusCaptor = ArgumentCaptor.forClass(StatusEnum.class);
verify(myJobInstanceRepository)
.updateInstanceStatus(eq(jobId), statusCaptor.capture());
assertEquals(StatusEnum.COMPLETED, statusCaptor.getValue());
}
@Test
public void deleteChunks_withInstanceId_callsChunkRepoDelete() {
// setup
String jobId = "jobid";
// test
mySvc.deleteChunks(jobId);
// verify
verify(myWorkChunkRepository)
.deleteAllForInstance(eq(jobId));
}
@Test
public void deleteInstanceAndChunks_withInstanceId_callsBothWorkchunkAndJobRespositoryDeletes() {
// setup
String jobid = "jobid";
// test
mySvc.deleteInstanceAndChunks(jobid);
// verify
verify(myWorkChunkRepository)
.deleteAllForInstance(eq(jobid));
verify(myJobInstanceRepository)
.deleteById(eq(jobid));
}
@Test
public void updateInstance_withInstance_checksInstanceExistsAndCallsSave() {
// setup
JobInstance toSave = createJobInstanceWithDemoData();
Batch2JobInstanceEntity entity = new Batch2JobInstanceEntity();
entity.setId(toSave.getInstanceId());
// when
when(myJobInstanceRepository.findById(eq(toSave.getInstanceId())))
.thenReturn(Optional.of(entity));
// test
mySvc.updateInstance(toSave);
// verify
ArgumentCaptor<Batch2JobInstanceEntity> entityCaptor = ArgumentCaptor.forClass(Batch2JobInstanceEntity.class);
verify(myJobInstanceRepository)
.save(entityCaptor.capture());
Batch2JobInstanceEntity saved = entityCaptor.getValue();
assertEquals(toSave.getInstanceId(), saved.getId());
assertEquals(toSave.getStatus(), saved.getStatus());
assertEquals(toSave.getStartTime(), entity.getStartTime());
assertEquals(toSave.getEndTime(), entity.getEndTime());
assertEquals(toSave.isCancelled(), entity.isCancelled());
assertEquals(toSave.getCombinedRecordsProcessed(), entity.getCombinedRecordsProcessed());
assertEquals(toSave.getCombinedRecordsProcessedPerSecond(), entity.getCombinedRecordsProcessedPerSecond());
assertEquals(toSave.getTotalElapsedMillis(), entity.getTotalElapsedMillis());
assertEquals(toSave.isWorkChunksPurged(), entity.getWorkChunksPurged());
assertEquals(toSave.getProgress(), entity.getProgress());
assertEquals(toSave.getErrorMessage(), entity.getErrorMessage());
assertEquals(toSave.getErrorCount(), entity.getErrorCount());
assertEquals(toSave.getEstimatedTimeRemaining(), entity.getEstimatedTimeRemaining());
assertEquals(toSave.getCurrentGatedStepId(), entity.getCurrentGatedStepId());
assertEquals(toSave.getReport(), entity.getReport());
}
@Test
public void updateInstance_invalidId_throwsIllegalArgumentException() {
// setup
JobInstance instance = createJobInstanceWithDemoData();
// when
when(myJobInstanceRepository.findById(anyString()))
.thenReturn(Optional.empty());
// test
try {
mySvc.updateInstance(instance);
fail();
} catch (IllegalArgumentException ex) {
assertTrue(ex.getMessage().contains("Unknown instance ID: " + instance.getInstanceId()));
}
}
@Test
public void fetchAllWorkChunksIterator_withValidIdAndBoolToSayToIncludeData_returnsPagingIterator() {
// setup
String instanceId = "instanceId";
String jobDefinition = "definitionId";
int version = 1;
String targetStep = "step";
List<Batch2WorkChunkEntity> workChunkEntityList = new ArrayList<>();
Batch2WorkChunkEntity chunk1 = new Batch2WorkChunkEntity();
chunk1.setId("id1");
chunk1.setJobDefinitionVersion(version);
chunk1.setJobDefinitionId(jobDefinition);
chunk1.setSerializedData("serialized data 1");
chunk1.setTargetStepId(targetStep);
workChunkEntityList.add(chunk1);
Batch2WorkChunkEntity chunk2 = new Batch2WorkChunkEntity();
chunk2.setId("id2");
chunk2.setSerializedData("serialized data 2");
chunk2.setJobDefinitionId(jobDefinition);
chunk2.setJobDefinitionVersion(version);
chunk2.setTargetStepId(targetStep);
workChunkEntityList.add(chunk2);
for (boolean includeData : new boolean[] { true , false }) {
// when
when(myWorkChunkRepository.fetchChunks(any(PageRequest.class), eq(instanceId)))
.thenReturn(workChunkEntityList);
// test
Iterator<WorkChunk> chunkIterator = mySvc.fetchAllWorkChunksIterator(instanceId, includeData);
// verify
assertTrue(chunkIterator instanceof PagingIterator);
verify(myWorkChunkRepository, never())
.fetchChunks(any(PageRequest.class), anyString());
// now try the iterator out...
WorkChunk chunk = chunkIterator.next();
assertEquals(chunk1.getId(), chunk.getId());
if (includeData) {
assertEquals(chunk1.getSerializedData(), chunk.getData());
} else {
assertNull(chunk.getData());
}
chunk = chunkIterator.next();
assertEquals(chunk2.getId(), chunk.getId());
if (includeData) {
assertEquals(chunk2.getSerializedData(), chunk.getData());
} else {
assertNull(chunk.getData());
}
verify(myWorkChunkRepository)
.fetchChunks(any(PageRequest.class), eq(instanceId));
reset(myWorkChunkRepository);
}
}
@Test
public void fetchInstances_validRequest_returnsFoundInstances() {
// setup
int pageStart = 1;
int pageSize = 132;
JobInstance job1 = createJobInstanceWithDemoData();
FetchJobInstancesRequest req = new FetchJobInstancesRequest(job1.getInstanceId(), "params");
JobInstance job2 = createJobInstanceWithDemoData();
List<JobInstance> instances = Arrays.asList(job1, job2);
// when
when(myJobInstanceRepository
.findInstancesByJobIdAndParams(eq(req.getJobDefinition()),
eq(req.getParameters()),
any(Pageable.class)))
.thenReturn(instances);
// test
List<JobInstance> retInstances = mySvc.fetchInstances(req, pageStart, pageSize);
// verify
assertEquals(instances.size(), retInstances.size());
assertEquals(instances, retInstances);
ArgumentCaptor<Pageable> pageableCaptor = ArgumentCaptor.forClass(Pageable.class);
verify(myJobInstanceRepository)
.findInstancesByJobIdAndParams(
eq(req.getJobDefinition()),
eq(req.getParameters()),
pageableCaptor.capture()
);
Pageable pageable = pageableCaptor.getValue();
assertEquals(pageStart, pageable.getPageNumber());
assertEquals(pageSize, pageable.getPageSize());
}
@Test
public void fetchInstance_validId_returnsInstance() {
// setup
Batch2JobInstanceEntity entity = createBatch2JobInstanceEntity();
JobInstance instance = createJobInstanceFromEntity(entity);
// when
when(myJobInstanceRepository.findById(eq(instance.getInstanceId())))
.thenReturn(Optional.of(entity));
// test
Optional<JobInstance> retInstance = mySvc.fetchInstance(entity.getId());
// verify
assertTrue(retInstance.isPresent());
assertEquals(instance.getInstanceId(), retInstance.get().getInstanceId());
}
private JobInstance createJobInstanceWithDemoData() {
return createJobInstanceFromEntity(createBatch2JobInstanceEntity());
}
private JobInstance createJobInstanceFromEntity(Batch2JobInstanceEntity theEntity) {
return JobInstanceUtil.fromEntityToInstance(theEntity);
}
private Batch2JobInstanceEntity createBatch2JobInstanceEntity() {
Batch2JobInstanceEntity entity = new Batch2JobInstanceEntity();
entity.setId("id");
entity.setStartTime(new Date(2000, 1, 2));
entity.setEndTime(new Date(2000, 2, 3));
entity.setStatus(StatusEnum.COMPLETED);
entity.setCancelled(true);
entity.setCombinedRecordsProcessed(12);
entity.setCombinedRecordsProcessedPerSecond(2d);
entity.setTotalElapsedMillis(1000);
entity.setWorkChunksPurged(true);
entity.setProgress(22d);
entity.setErrorMessage("1232");
entity.setErrorCount(9);
entity.setEstimatedTimeRemaining("blah");
entity.setCurrentGatedStepId("stepid");
entity.setReport("report");
return entity;
}
} }

View File

@ -0,0 +1,440 @@
package ca.uhn.fhir.jpa.bulk.export.svc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IJpaIdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class JpaBulkExportProcessorTest {
private class ListResultIterator implements IResultIterator {
private List<ResourcePersistentId> myList;
private int index;
public ListResultIterator(List<ResourcePersistentId> theList) {
myList = theList;
}
@Override
public int getSkippedCount() {
return 0;
}
@Override
public int getNonSkippedCount() {
return 0;
}
@Override
public Collection<ResourcePersistentId> getNextResultBatch(long theBatchSize) {
return null;
}
@Override
public void close() throws IOException {
}
@Override
public boolean hasNext() {
return index < myList.size();
}
@Override
public ResourcePersistentId next() {
return myList.get(index++);
}
}
@Spy
private FhirContext myFhirContext = FhirContext.forR4Cached();
@Mock
private BulkExportHelperService myBulkExportHelperService;
@Mock
private DaoConfig myDaoConfig;
@Mock
private DaoRegistry myDaoRegistry;
@Mock
private SearchBuilderFactory mySearchBuilderFactory;
@Mock
private IIdHelperService myIdHelperService;
@Mock
private IMdmLinkDao myMdmLinkDao;
@Mock
private IJpaIdHelperService myJpaIdHelperService;
@Mock
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
@InjectMocks
private JpaBulkExportProcessor myProcessor;
private ExportPIDIteratorParameters createExportParameters(BulkDataExportOptions.ExportStyle theExportStyle) {
ExportPIDIteratorParameters parameters = new ExportPIDIteratorParameters();
parameters.setJobId("jobId");
parameters.setExportStyle(theExportStyle);
if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
parameters.setGroupId("123");
}
parameters.setStartDate(new Date());
return parameters;
}
private List<IPrimitiveType> createPatientTypes() {
long id1 = 123;
long id2 = 456;
String patient1Id = "Patient/" + id1;
String patient2Id = "Patient/" + id2;
List<IPrimitiveType> patientTypes = Arrays.asList(
new IdDt(patient1Id),
new IdDt(patient2Id)
);
return patientTypes;
}
private IMdmLinkDao.MdmPidTuple createTuple(long theGroupId, long theGoldenId) {
return new IMdmLinkDao.MdmPidTuple() {
@Override
public Long getGoldenPid() {
return theGoldenId;
}
@Override
public Long getSourcePid() {
return theGroupId;
}
};
}
@Test
public void getResourcePidIterator_paramsWithPatientExportStyle_returnsAnIterator() {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
parameters.setResourceType("Patient");
SearchParameterMap map = new SearchParameterMap();
List<SearchParameterMap> maps = new ArrayList<>();
maps.add(map);
ResourcePersistentId pid = new ResourcePersistentId("Patient/123");
ResourcePersistentId pid2 = new ResourcePersistentId("Observation/123");
ListResultIterator resultIterator = new ListResultIterator(
Arrays.asList(pid, pid2)
);
// extra mocks
IFhirResourceDao<?> mockDao = mock(IFhirResourceDao.class);
ISearchBuilder searchBuilder = mock(ISearchBuilder.class);
// when
when(myDaoConfig.getIndexMissingFields())
.thenReturn(DaoConfig.IndexEnabledEnum.ENABLED);
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(any(RuntimeResourceDefinition.class), eq(parameters)))
.thenReturn(maps);
// from getSearchBuilderForLocalResourceType
when(myDaoRegistry.getResourceDao(anyString()))
.thenReturn(mockDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(mockDao), eq(parameters.getResourceType()), any()))
.thenReturn(searchBuilder);
// ret
when(searchBuilder.createQuery(
eq(map),
any(SearchRuntimeDetails.class),
any(),
eq(RequestPartitionId.allPartitions())))
.thenReturn(resultIterator);
// test
Iterator<ResourcePersistentId> pidIterator = myProcessor.getResourcePidIterator(parameters);
// verify
assertNotNull(pidIterator);
assertTrue(pidIterator.hasNext());
assertEquals(pid, pidIterator.next());
assertTrue(pidIterator.hasNext());
assertEquals(pid2, pidIterator.next());
assertFalse(pidIterator.hasNext());
}
@Test
public void getResourcePidIterator_patientStyleWithIndexMissingFieldsDisabled_throws() {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.PATIENT);
parameters.setResourceType("Patient");
// when
when(myDaoConfig.getIndexMissingFields())
.thenReturn(DaoConfig.IndexEnabledEnum.DISABLED);
// test
try {
myProcessor.getResourcePidIterator(parameters);
fail();
} catch (IllegalStateException ex) {
assertTrue(ex.getMessage().contains("You attempted to start a Patient Bulk Export,"));
}
}
@ParameterizedTest
@ValueSource(
booleans = { true, false }
)
public void getResourcePidIterator_groupExportStyleWithPatientResource_returnsIterator(boolean theMdm) {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
parameters.setResourceType("Patient");
long groupId = Long.parseLong(parameters.getGroupId());
long groupGoldenPid = 4567l;
Group groupResource = new Group();
groupResource.setId(parameters.getGroupId());
List<IPrimitiveType> patientTypes = createPatientTypes();
List<Long> pids = new ArrayList<>();
for (IPrimitiveType type : patientTypes) {
pids.add(((IdDt) type).getIdPartAsLong());
}
IMdmLinkDao.MdmPidTuple tuple = createTuple(groupId, groupGoldenPid);
IFhirResourceDao<Group> groupDao = mock(IFhirResourceDao.class);
IFhirPath path = mock(IFhirPath.class);
parameters.setExpandMdm(theMdm); // set mdm expansion
// when
// getMembers
when(myDaoRegistry.getResourceDao(eq("Group")))
.thenReturn(groupDao);
when(groupDao.read(eq(new IdDt(parameters.getGroupId())), any(SystemRequestDetails.class)))
.thenReturn(groupResource);
when(myFhirContext.newFhirPath())
.thenReturn(path);
when(path.evaluate(eq(groupResource), anyString(), any(Class.class)))
.thenReturn(patientTypes);
when(myJpaIdHelperService.getPidsOrThrowException(anyList()))
.thenReturn(pids);
// mdm expansion stuff
if (theMdm) {
when(myJpaIdHelperService.getPidOrNull(any(Group.class)))
.thenReturn(groupId);
when(myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(anyLong(), eq(MdmMatchResultEnum.MATCH)))
.thenReturn(Collections.singletonList(tuple));
when(myMdmExpansionCacheSvc.hasBeenPopulated())
.thenReturn(false); // does not matter, since if false, it then goes and populates
}
// test
Iterator<ResourcePersistentId> pidIterator = myProcessor.getResourcePidIterator(parameters);
// verify
assertNotNull(pidIterator);
int count = 0;
assertTrue(pidIterator.hasNext());
while (pidIterator.hasNext()) {
ResourcePersistentId pid = pidIterator.next();
long idAsLong = pid.getIdAsLong();
boolean existing = pids.contains(idAsLong);
if (!existing) {
assertTrue(theMdm);
assertEquals(groupGoldenPid, idAsLong);
} else {
count++;
}
}
int total = pids.size();
assertEquals(total, count);
}
@ParameterizedTest
@ValueSource(booleans = { false, true })
public void getResourcePidIterator_groupExportNonPatient_returnsIterator(boolean theMdm) {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
parameters.setResourceType("Observation");
long groupId = Long.parseLong(parameters.getGroupId());
Group groupResource = new Group();
groupResource.setId(parameters.getGroupId());
long groupGoldenPid = 4567l;
ResourcePersistentId pid = new ResourcePersistentId("Patient/123");
ResourcePersistentId pid2 = new ResourcePersistentId("Observation/123");
ListResultIterator resultIterator = new ListResultIterator(
Arrays.asList(pid, pid2)
);
IMdmLinkDao.MdmPidTuple tuple = createTuple(groupId, groupGoldenPid);
List<IPrimitiveType> patientTypes = createPatientTypes();
IFhirResourceDao<Group> groupDao = mock(IFhirResourceDao.class);
IFhirResourceDao<Observation> observationDao = mock(IFhirResourceDao.class);
parameters.setExpandMdm(theMdm); // set mdm expansion
IFhirPath path = mock(IFhirPath.class);
ISearchBuilder searchBuilder = mock(ISearchBuilder.class);
// when
// getMembers
when(myDaoRegistry.getResourceDao(eq("Group")))
.thenReturn(groupDao);
when(groupDao.read(any(IIdType.class), any(SystemRequestDetails.class)))
.thenReturn(groupResource);
when(myJpaIdHelperService.getPidOrNull(eq(groupResource)))
.thenReturn(groupId);
if (theMdm) {
when(myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(anyLong(), eq(MdmMatchResultEnum.MATCH)))
.thenReturn(Collections.singletonList(tuple));
when(myIdHelperService.translatePidsToForcedIds(any(Set.class)))
.thenAnswer(params -> {
Set<Long> uniqPids = params.getArgument(0);
HashMap<Long, Optional<String>> answer = new HashMap<>();
for (long l : uniqPids) {
answer.put(l, Optional.empty());
}
return answer;
});
}
when(myFhirContext.newFhirPath())
.thenReturn(path);
when(path.evaluate(eq(groupResource), anyString(), any(Class.class)))
.thenReturn(patientTypes);
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(any(RuntimeResourceDefinition.class), any(ExportPIDIteratorParameters.class)))
.thenReturn(Collections.singletonList(new SearchParameterMap()));
when(myDaoRegistry.getResourceDao(eq("Observation")))
.thenReturn(observationDao);
when(mySearchBuilderFactory.newSearchBuilder(any(), eq("Observation"), any()))
.thenReturn(searchBuilder);
when(searchBuilder.createQuery(any(SearchParameterMap.class),
any(SearchRuntimeDetails.class),
any(),
eq(RequestPartitionId.allPartitions())))
.thenReturn(resultIterator);
// test
Iterator<ResourcePersistentId> pidIterator = myProcessor.getResourcePidIterator(parameters);
// verify
assertNotNull(pidIterator, "PID iterator null for mdm = " + theMdm);
assertTrue(pidIterator.hasNext(), "PID iterator empty for mdm = " + theMdm);
}
@Test
public void getResourcePidIterator_systemExport_returnsIterator() {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.SYSTEM);
parameters.setResourceType("Patient");
ResourcePersistentId pid = new ResourcePersistentId("Patient/123");
ResourcePersistentId pid2 = new ResourcePersistentId("Observation/123");
ListResultIterator resultIterator = new ListResultIterator(
Arrays.asList(pid, pid2)
);
// extra mocks
IFhirResourceDao<Patient> dao = mock(IFhirResourceDao.class);
ISearchBuilder searchBuilder = mock(ISearchBuilder.class);
// when
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(
any(RuntimeResourceDefinition.class),
any(ExportPIDIteratorParameters.class)
)).thenReturn(Collections.singletonList(new SearchParameterMap()));
when(myDaoRegistry.getResourceDao(eq("Patient")))
.thenReturn(dao);
when(mySearchBuilderFactory.newSearchBuilder(
any(IFhirResourceDao.class),
anyString(),
any()
)).thenReturn(searchBuilder);
when(searchBuilder.createQuery(
any(SearchParameterMap.class),
any(SearchRuntimeDetails.class),
any(),
eq(RequestPartitionId.allPartitions())
)).thenReturn(resultIterator);
// test
Iterator<ResourcePersistentId> iterator = myProcessor.getResourcePidIterator(parameters);
// verify
assertNotNull(iterator);
assertTrue(iterator.hasNext());
int count = 0;
while (iterator.hasNext()) {
ResourcePersistentId ret = iterator.next();
assertTrue(
ret.equals(pid) || ret.equals(pid2)
);
count++;
}
assertEquals(2, count);
}
}

View File

@ -7,7 +7,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails; import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc; import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmControllerSvc; import ca.uhn.fhir.mdm.api.IMdmControllerSvc;
@ -187,7 +188,8 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
JobInstanceStartRequest request = new JobInstanceStartRequest(); JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(MdmClearAppCtx.JOB_MDM_CLEAR); request.setJobDefinitionId(MdmClearAppCtx.JOB_MDM_CLEAR);
request.setParameters(params); request.setParameters(params);
String id = myJobCoordinator.startInstance(request); Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
String id = response.getJobId();
IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext); IBaseParameters retVal = ParametersUtil.newInstance(myFhirContext);
ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id); ParametersUtil.addParameterToParametersString(myFhirContext, retVal, ProviderConstants.OPERATION_BATCH_RESPONSE_JOB_ID, id);

View File

@ -14,6 +14,7 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner; import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.StringType; import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
@ -121,7 +122,22 @@ public class MdmProviderClearLinkR4Test extends BaseLinkR4Test {
return new SearchParameterMap().setLoadSynchronous(true).add("_tag", new TokenParam(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_HAPI_MDM_MANAGED)); return new SearchParameterMap().setLoadSynchronous(true).add("_tag", new TokenParam(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_HAPI_MDM_MANAGED));
} }
@Test @Tag("Intermittent")
// TODO KHS I know intermittent tags aren't used by hapi but this will help me find this test when I review intermittents.
// Last time this test failed, this is what was in the logs:
// 2022-07-17 19:57:27.103 [main] INFO c.u.f.batch2.channel.BatchJobSender [BatchJobSender.java:43] Sending work notification for job[MDM_CLEAR] instance[6f6d6fc5-f74a-426f-b215-7a383893f4bc] step[generate-ranges] chunk[219e29d5-1ee7-47dd-99a1-c636b1b221ae]
// 2022-07-17 19:57:27.193 [batch2-work-notification-1] INFO c.u.f.m.b.MdmGenerateRangeChunksStep [MdmGenerateRangeChunksStep.java:49] Initiating mdm clear of [Patient]] Golden Resources from Sat Jan 01 00:00:00 UTC 2000 to Sun Jul 17 19:57:27 UTC 2022
// 2022-07-17 19:57:27.275 [batch2-work-notification-1] INFO c.u.f.m.b.MdmGenerateRangeChunksStep [MdmGenerateRangeChunksStep.java:49] Initiating mdm clear of [Practitioner]] Golden Resources from Sat Jan 01 00:00:00 UTC 2000 to Sun Jul 17 19:57:27 UTC 2022
// 2022-07-17 19:57:27.381 [awaitility-thread] INFO c.u.f.b.p.JobInstanceProgressCalculator [JobInstanceProgressCalculator.java:67] Job 6f6d6fc5-f74a-426f-b215-7a383893f4bc of type MDM_CLEAR has status IN_PROGRESS - 0 records processed (null/sec) - ETA: null
// 2022-07-17 19:57:27.510 [awaitility-thread] INFO c.u.f.b.p.JobInstanceProgressCalculator [JobInstanceProgressCalculator.java:67] Job 6f6d6fc5-f74a-426f-b215-7a383893f4bc of type MDM_CLEAR has status IN_PROGRESS - 0 records processed (null/sec) - ETA: null
// 2022-07-17 19:57:37.175 [awaitility-thread] INFO c.u.f.b.p.JobInstanceProgressCalculator [JobInstanceProgressCalculator.java:67] Job 6f6d6fc5-f74a-426f-b215-7a383893f4bc of type MDM_CLEAR has status IN_PROGRESS - 0 records processed (null/sec) - ETA: null
// 2022-07-17 19:57:37.329 [main] INFO c.u.f.m.r.config.MdmRuleValidator [MdmRuleValidator.java:116] Validating MDM types [Patient, Practitioner, Medication]
// 2022-07-17 19:57:37.330 [main] INFO c.u.f.m.r.config.MdmRuleValidator [MdmRuleValidator.java:133] Validating search parameters [ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson@799225ca, ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson@f03b50d, ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson@5f19ad6b, ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson@4976b9, ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson@681dbf0f]
// 2022-07-17 19:57:37.330 [main] INFO c.u.f.m.r.config.MdmRuleValidator [MdmRuleValidator.java:161] Validating match fields [ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@7aa4d4dc, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@68444c16, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@23f30319, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@261325af, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@7acd1785, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@30a3d036, ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson@bf3e6f0]
// 2022-07-17 19:57:37.330 [main] INFO c.u.f.m.r.config.MdmRuleValidator [MdmRuleValidator.java:253] Validating system URI http://company.io/fhir/NamingSystem/custom-eid-system
// 2022-07-17 19:57:37.335 [main] INFO c.u.f.j.s.r.ResourceReindexingSvcImpl [ResourceReindexingSvcImpl.java:235] Cancelling and purging all resource reindexing jobs
// @Test
public void testGoldenResourceWithCircularReferenceCanBeCleared() { public void testGoldenResourceWithCircularReferenceCanBeCleared() {
Patient patientAndUpdateLinks = createPatientAndUpdateLinks(buildPaulPatient()); Patient patientAndUpdateLinks = createPatientAndUpdateLinks(buildPaulPatient());
Patient patientAndUpdateLinks1 = createPatientAndUpdateLinks(buildJanePatient()); Patient patientAndUpdateLinks1 = createPatientAndUpdateLinks(buildJanePatient());


@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>


@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>


@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>


@ -6,7 +6,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>


@ -24,6 +24,7 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobMaintenanceService; import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import org.awaitility.core.ConditionTimeoutException; import org.awaitility.core.ConditionTimeoutException;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@ -47,6 +48,10 @@ public class Batch2JobHelper {
@Autowired @Autowired
private IJobCoordinator myJobCoordinator; private IJobCoordinator myJobCoordinator;
public JobInstance awaitJobCompletion(Batch2JobStartResponse theStartResponse) {
return awaitJobCompletion(theStartResponse.getJobId());
}
public JobInstance awaitJobCompletion(String theId) { public JobInstance awaitJobCompletion(String theId) {
await().until(() -> { await().until(() -> {
myJobMaintenanceService.runMaintenancePass(); myJobMaintenanceService.runMaintenancePass();
@ -55,10 +60,18 @@ public class Batch2JobHelper {
return myJobCoordinator.getInstance(theId); return myJobCoordinator.getInstance(theId);
} }
public void awaitSingleChunkJobCompletion(Batch2JobStartResponse theStartResponse) {
awaitSingleChunkJobCompletion(theStartResponse.getJobId());
}
public void awaitSingleChunkJobCompletion(String theId) { public void awaitSingleChunkJobCompletion(String theId) {
await().until(() -> myJobCoordinator.getInstance(theId).getStatus() == StatusEnum.COMPLETED); await().until(() -> myJobCoordinator.getInstance(theId).getStatus() == StatusEnum.COMPLETED);
} }
public JobInstance awaitJobFailure(Batch2JobStartResponse theStartResponse) {
return awaitJobFailure(theStartResponse.getJobId());
}
public JobInstance awaitJobFailure(String theId) { public JobInstance awaitJobFailure(String theId) {
await().until(() -> { await().until(() -> {
myJobMaintenanceService.runMaintenancePass(); myJobMaintenanceService.runMaintenancePass();
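For orientation, here is a minimal usage sketch of the Batch2JobStartResponse-based overloads added above. This is hypothetical caller code, not part of this commit; the job definition id is an assumed placeholder, and the field names match the test fixtures used elsewhere in this change:

JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId("SOME_JOB_DEFINITION"); // assumed id, for illustration only
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
// The overload unwraps startResponse.getJobId() and delegates to the existing String-based method.
JobInstance completedInstance = myBatch2JobHelper.awaitJobCompletion(startResponse);

The DeleteExpungeJobTest and ReindexJobTest changes later in this commit migrate to exactly this pattern.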


@ -23,6 +23,7 @@ import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings; import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory; import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory;
import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannel; import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannel;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper; import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import ca.uhn.fhir.model.api.IModelJson; import ca.uhn.fhir.model.api.IModelJson;
@ -111,10 +112,10 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
JobInstanceStartRequest request = buildRequest(jobId); JobInstanceStartRequest request = buildRequest(jobId);
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
myBatch2JobHelper.awaitSingleChunkJobCompletion(instanceId); myBatch2JobHelper.awaitSingleChunkJobCompletion(startResponse.getJobId());
} }
@Test @Test
@ -135,13 +136,13 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
myLastStepLatch.setExpectedCount(1); myLastStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
myBatch2JobHelper.assertNoGatedStep(instanceId); myBatch2JobHelper.assertNoGatedStep(startResponse.getJobId());
// Since there was only one chunk, the job should proceed without requiring a maintenance pass // Since there was only one chunk, the job should proceed without requiring a maintenance pass
myBatch2JobHelper.awaitSingleChunkJobCompletion(instanceId); myBatch2JobHelper.awaitSingleChunkJobCompletion(startResponse.getJobId());
myLastStepLatch.awaitExpected(); myLastStepLatch.awaitExpected();
} }
@ -178,7 +179,8 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
myLastStepLatch.setExpectedCount(1); myLastStepLatch.setExpectedCount(1);
calledLatch.setExpectedCount(1); calledLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
calledLatch.awaitExpected(); calledLatch.awaitExpected();
@ -283,9 +285,11 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
// run test // run test
JobInstanceStartRequest request = buildRequest(jobId); JobInstanceStartRequest request = buildRequest(jobId);
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId); myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId);
// wait for last step to finish // wait for last step to finish
@ -327,7 +331,8 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
JobInstanceStartRequest request = buildRequest(jobId); JobInstanceStartRequest request = buildRequest(jobId);
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId); myBatch2JobHelper.awaitGatedStepId(FIRST_STEP_ID, instanceId);
@ -354,7 +359,8 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
JobInstanceStartRequest request = buildRequest(jobId); JobInstanceStartRequest request = buildRequest(jobId);
// execute // execute
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
// validate // validate
myBatch2JobHelper.awaitJobFailure(instanceId); myBatch2JobHelper.awaitJobFailure(instanceId);
@ -378,7 +384,8 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
// execute // execute
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
myFirstStepLatch.awaitExpected(); myFirstStepLatch.awaitExpected();
// validate // validate
@ -441,8 +448,8 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
// test // test
JobInstanceStartRequest request = buildRequest(jobId); JobInstanceStartRequest request = buildRequest(jobId);
myFirstStepLatch.setExpectedCount(1); myFirstStepLatch.setExpectedCount(1);
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
JobInstance instance = myBatch2JobHelper.awaitJobHitsStatusInTime(instanceId, JobInstance instance = myBatch2JobHelper.awaitJobHitsStatusInTime(response.getJobId(),
12, // we want to wait a long time (2 min here) cause backoff is incremental 12, // we want to wait a long time (2 min here) cause backoff is incremental
StatusEnum.FAILED, StatusEnum.ERRORED // error states StatusEnum.FAILED, StatusEnum.ERRORED // error states
); );


@ -1,15 +1,17 @@
package ca.uhn.fhir.jpa.bulk; package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2BaseJobParameters;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.client.apache.ResourceEntity; import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@ -17,7 +19,6 @@ import ca.uhn.fhir.test.utilities.JettyUtil;
import ca.uhn.fhir.util.JsonUtil; import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
@ -37,13 +38,18 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
import org.mockito.Captor; import org.mockito.InjectMocks;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
@ -54,11 +60,11 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Mockito.eq; import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -70,17 +76,17 @@ public class BulkDataExportProviderTest {
private static final String GROUP_ID = "Group/G2401"; private static final String GROUP_ID = "Group/G2401";
private static final String G_JOB_ID = "0000000-GGGGGG"; private static final String G_JOB_ID = "0000000-GGGGGG";
private Server myServer; private Server myServer;
@Spy
private final FhirContext myCtx = FhirContext.forR4Cached(); private final FhirContext myCtx = FhirContext.forR4Cached();
private int myPort; private int myPort;
@Mock @Mock
private IBulkDataExportSvc myBulkDataExportSvc; private IBatch2JobRunner myJobRunner;
@Mock
private IInterceptorBroadcaster myInterceptorBroadcaster;
private CloseableHttpClient myClient; private CloseableHttpClient myClient;
@Captor
private ArgumentCaptor<BulkDataExportOptions> myBulkDataExportOptionsCaptor; @InjectMocks
@Captor private BulkDataExportProvider myProvider;
private ArgumentCaptor<Boolean> myBooleanArgumentCaptor;
@AfterEach @AfterEach
public void after() throws Exception { public void after() throws Exception {
@ -92,13 +98,9 @@ public class BulkDataExportProviderTest {
public void start() throws Exception { public void start() throws Exception {
myServer = new Server(0); myServer = new Server(0);
BulkDataExportProvider provider = new BulkDataExportProvider();
provider.setBulkDataExportSvcForUnitTests(myBulkDataExportSvc);
provider.setFhirContextForUnitTest(myCtx);
ServletHandler proxyHandler = new ServletHandler(); ServletHandler proxyHandler = new ServletHandler();
RestfulServer servlet = new RestfulServer(myCtx); RestfulServer servlet = new RestfulServer(myCtx);
servlet.registerProvider(provider); servlet.registerProvider(myProvider);
ServletHolder servletHolder = new ServletHolder(servlet); ServletHolder servletHolder = new ServletHolder(servlet);
proxyHandler.addServletWithMapping(servletHolder, "/*"); proxyHandler.addServletWithMapping(servletHolder, "/*");
myServer.setHandler(proxyHandler); myServer.setHandler(proxyHandler);
@ -109,26 +111,46 @@ public class BulkDataExportProviderTest {
HttpClientBuilder builder = HttpClientBuilder.create(); HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager); builder.setConnectionManager(connectionManager);
myClient = builder.build(); myClient = builder.build();
}
private BulkExportParameters verifyJobStart() {
ArgumentCaptor<Batch2BaseJobParameters> startJobCaptor = ArgumentCaptor.forClass(Batch2BaseJobParameters.class);
verify(myJobRunner).startNewJob(startJobCaptor.capture());
Batch2BaseJobParameters sp = startJobCaptor.getValue();
assertTrue(sp instanceof BulkExportParameters);
return (BulkExportParameters) sp;
}
private Batch2JobStartResponse createJobStartResponse(String theJobId) {
Batch2JobStartResponse response = new Batch2JobStartResponse();
response.setJobId(theJobId);
return response;
}
private Batch2JobStartResponse createJobStartResponse() {
return createJobStartResponse(A_JOB_ID);
} }
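The helpers above centralize the stubbing and verification that every test below repeats: createJobStartResponse(...) feeds the mocked IBatch2JobRunner, and verifyJobStart() captures the submitted Batch2BaseJobParameters and downcasts it to BulkExportParameters. A condensed, hypothetical skeleton of that arrange/act/verify flow (only the Mockito and helper calls shown in this diff are used; the HTTP request itself is elided as a comment):

// arrange: the mocked job runner returns a canned start response
when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse());
// act: issue an HTTP $export request against the provider under test (elided in this sketch)
// verify: capture the parameters the provider handed to the job runner and assert on them
BulkExportParameters params = verifyJobStart();
assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());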
@Test @Test
public void testSuccessfulInitiateBulkRequest_Post() throws IOException { public void testSuccessfulInitiateBulkRequest_Post() throws IOException {
String patientResource = "Patient";
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() String practitionerResource = "Practitioner";
.setJobId(A_JOB_ID); String filter = "Patient?identifier=foo";
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo); when(myJobRunner.startNewJob(any()))
.thenReturn(createJobStartResponse());
InstantType now = InstantType.now(); InstantType now = InstantType.now();
Parameters input = new Parameters(); Parameters input = new Parameters();
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType(patientResource + ", " + practitionerResource));
input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now); input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now);
input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Patient?identifier=foo")); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType(filter));
ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// test
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input)); post.setEntity(new ResourceEntity(myCtx, input));
@ -141,21 +163,18 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class)); BulkExportParameters params = verifyJobStart();
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(2, params.getResourceTypes().size());
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertTrue(params.getResourceTypes().contains(patientResource));
assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); assertTrue(params.getResourceTypes().contains(practitionerResource));
assertThat(options.getSince(), notNullValue()); assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo")); assertNotNull(params.getStartDate());
assertTrue(params.getFilters().contains(filter));
} }
@Test @Test
public void testSuccessfulInitiateBulkRequest_Get() throws IOException { public void testSuccessfulInitiateBulkRequest_Get() throws IOException {
when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse());
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID);
when(myBulkDataExportSvc.submitJob(any(),any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
InstantType now = InstantType.now(); InstantType now = InstantType.now();
@ -176,21 +195,17 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class)); BulkExportParameters params = verifyJobStart();
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); assertThat(params.getStartDate(), notNullValue());
assertThat(options.getSince(), notNullValue()); assertThat(params.getFilters(), containsInAnyOrder("Patient?identifier=foo"));
assertThat(options.getFilters(), containsInAnyOrder("Patient?identifier=foo"));
} }
@Test @Test
public void testSuccessfulInitiateBulkRequest_Get_MultipleTypeFilters() throws IOException { public void testSuccessfulInitiateBulkRequest_Get_MultipleTypeFilters() throws IOException {
when(myJobRunner.startNewJob(any()))
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() .thenReturn(createJobStartResponse());
.setJobId(A_JOB_ID);
when(myBulkDataExportSvc.submitJob(any(),any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
@ -209,23 +224,26 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class)); BulkExportParameters params = verifyJobStart();
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, params.getOutputFormat());
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(params.getResourceTypes(), containsInAnyOrder("Patient", "EpisodeOfCare"));
assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "EpisodeOfCare")); assertThat(params.getStartDate(), nullValue());
assertThat(options.getSince(), nullValue()); assertThat(params.getFilters(), containsInAnyOrder("Patient?_id=P999999990", "EpisodeOfCare?patient=P999999990"));
assertThat(options.getFilters(), containsInAnyOrder("Patient?_id=P999999990", "EpisodeOfCare?patient=P999999990"));
} }
@Test @Test
public void testPollForStatus_BUILDING() throws IOException { public void testPollForStatus_QUEUED() throws IOException {
// setup
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(A_JOB_ID);
info.setStatus(BulkExportJobStatusEnum.BUILDING);
info.setEndTime(new Date());
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // when
.setJobId(A_JOB_ID) when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
.setStatus(BulkExportJobStatusEnum.BUILDING) .thenReturn(info);
.setStatusTime(InstantType.now().getValue());
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
// test
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
@ -236,21 +254,25 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode()); assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase()); assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue()); assertEquals("120", response.getFirstHeader(Constants.HEADER_RETRY_AFTER).getValue());
assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(), containsString("Build in progress - Status set to BUILDING at 20")); assertThat(response.getFirstHeader(Constants.HEADER_X_PROGRESS).getValue(),
containsString("Build in progress - Status set to " + info.getStatus() + " at 20"));
} }
} }
@Test @Test
public void testPollForStatus_ERROR() throws IOException { public void testPollForStatus_ERROR() throws IOException {
// setup
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(A_JOB_ID);
info.setStatus(BulkExportJobStatusEnum.ERROR);
info.setStartTime(new Date());
info.setErrorMsg("Some Error Message");
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // when
.setJobId(A_JOB_ID) when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
.setStatus(BulkExportJobStatusEnum.ERROR) .thenReturn(info);
.setStatusTime(InstantType.now().getValue())
.setStatusMessage("Some Error Message");
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
// call
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
@ -265,21 +287,30 @@ public class BulkDataExportProviderTest {
ourLog.info("Response content: {}", responseContent); ourLog.info("Response content: {}", responseContent);
assertThat(responseContent, containsString("\"diagnostics\": \"Some Error Message\"")); assertThat(responseContent, containsString("\"diagnostics\": \"Some Error Message\""));
} }
} }
@Test @Test
public void testPollForStatus_COMPLETED() throws IOException { public void testPollForStatus_COMPLETED() throws IOException {
// setup
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(A_JOB_ID);
info.setStatus(BulkExportJobStatusEnum.COMPLETE);
info.setEndTime(InstantType.now().getValue());
ArrayList<String> ids = new ArrayList<>();
ids.add(new IdType("Binary/111").getValueAsString());
ids.add(new IdType("Binary/222").getValueAsString());
ids.add(new IdType("Binary/333").getValueAsString());
BulkExportJobResults results = new BulkExportJobResults();
HashMap<String, List<String>> map = new HashMap<>();
map.put("Patient", ids);
results.setResourceTypeToBinaryIds(map);
info.setReport(JsonUtil.serialize(results));
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // when
.setJobId(A_JOB_ID) when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
.setStatus(BulkExportJobStatusEnum.COMPLETE) .thenReturn(info);
.setStatusTime(InstantType.now().getValue());
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111"));
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222"));
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/333"));
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
// call
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
@ -305,9 +336,51 @@ public class BulkDataExportProviderTest {
} }
@Test @Test
public void testPollForStatus_Gone() throws IOException { public void testExportWhenNoResourcesReturned() throws IOException {
// setup
String msg = "Some msg";
Batch2JobInfo info = new Batch2JobInfo();
info.setJobId(A_JOB_ID);
info.setStatus(BulkExportJobStatusEnum.COMPLETE);
info.setEndTime(InstantType.now().getValue());
ArrayList<String> ids = new ArrayList<>();
BulkExportJobResults results = new BulkExportJobResults();
HashMap<String, List<String>> map = new HashMap<>();
map.put("Patient", ids);
results.setResourceTypeToBinaryIds(map);
results.setReportMsg(msg);
info.setReport(JsonUtil.serialize(results));
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenThrow(new ResourceNotFoundException("Unknown job: AAA")); // when
when(myJobRunner.getJobInfo(eq(A_JOB_ID)))
.thenReturn(info);
// test
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
try (CloseableHttpResponse response = myClient.execute(get)) {
ourLog.info("Response: {}", response.toString());
assertEquals(200, response.getStatusLine().getStatusCode());
assertEquals("OK", response.getStatusLine().getReasonPhrase());
assertEquals(Constants.CT_JSON, response.getEntity().getContentType().getValue());
String responseContent = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
ourLog.info("Response content: {}", responseContent);
BulkExportResponseJson responseJson = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class);
assertEquals(msg, responseJson.getMsg());
}
}
@Test
public void testPollForStatus_Gone() throws IOException {
// setup
// when
when(myJobRunner.getJobInfo(anyString()))
.thenThrow(new ResourceNotFoundException("Unknown job: AAA"));
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID; JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
@ -322,7 +395,6 @@ public class BulkDataExportProviderTest {
assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim()); assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim());
assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\"")); assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\""));
} }
} }
/** /**
@ -336,14 +408,12 @@ public class BulkDataExportProviderTest {
@Test @Test
public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException { public void testSuccessfulInitiateGroupBulkRequest_Post() throws IOException {
// when
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); when(myJobRunner.startNewJob(any()))
when(myBulkDataExportSvc.submitJob(any(),any(), nullable(RequestDetails.class))).thenReturn(jobInfo); .thenReturn(createJobStartResponse(G_JOB_ID));
when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Observation", "DiagnosticReport"));
InstantType now = InstantType.now(); InstantType now = InstantType.now();
Parameters input = new Parameters(); Parameters input = new Parameters();
StringType obsTypeFilter = new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE"); StringType obsTypeFilter = new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE");
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
@ -354,6 +424,7 @@ public class BulkDataExportProviderTest {
ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT); HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input)); post.setEntity(new ResourceEntity(myCtx, input));
@ -365,22 +436,21 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class)); // verify
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); BulkExportParameters bp = verifyJobStart();
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport")); assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(options.getSince(), notNullValue()); assertThat(bp.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));
assertThat(options.getFilters(), notNullValue()); assertThat(bp.getStartDate(), notNullValue());
assertEquals(GROUP_ID, options.getGroupId().getValue()); assertThat(bp.getFilters(), notNullValue());
assertThat(options.isExpandMdm(), is(equalTo(true))); assertEquals(GROUP_ID, bp.getGroupId());
assertThat(bp.isExpandMdm(), is(equalTo(true)));
} }
@Test @Test
public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException { public void testSuccessfulInitiateGroupBulkRequest_Get() throws IOException {
// when
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo().setJobId(G_JOB_ID); when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse(G_JOB_ID));
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Patient", "Practitioner"));
InstantType now = InstantType.now(); InstantType now = InstantType.now();
@ -391,6 +461,7 @@ public class BulkDataExportProviderTest {
+ "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo|bar") + "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?identifier=foo|bar")
+ "&" + JpaConstants.PARAM_EXPORT_MDM+ "=true"; + "&" + JpaConstants.PARAM_EXPORT_MDM+ "=true";
// call
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
ourLog.info("Request: {}", url); ourLog.info("Request: {}", url);
@ -402,24 +473,21 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), any(), nullable(RequestDetails.class)); BulkExportParameters bp = verifyJobStart();
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner"));
assertThat(options.getResourceTypes(), containsInAnyOrder("Patient", "Practitioner")); assertThat(bp.getStartDate(), notNullValue());
assertThat(options.getSince(), notNullValue()); assertThat(bp.getFilters(), notNullValue());
assertThat(options.getFilters(), notNullValue()); assertEquals(GROUP_ID, bp.getGroupId());
assertEquals(GROUP_ID, options.getGroupId().getValue()); assertThat(bp.isExpandMdm(), is(equalTo(true)));
assertThat(options.isExpandMdm(), is(equalTo(true)));
} }
@Test @Test
public void testInitiateWithGetAndMultipleTypeFilters() throws IOException { public void testInitiateWithGetAndMultipleTypeFilters() throws IOException {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // setup
.setJobId(A_JOB_ID);
when(myBulkDataExportSvc.submitJob(any())).thenReturn(jobInfo);
InstantType now = InstantType.now(); InstantType now = InstantType.now();
// manual construct
String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation") + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation")
@ -440,20 +508,20 @@ public class BulkDataExportProviderTest {
url += multiValuedTypeFilterBuilder.toString(); url += multiValuedTypeFilterBuilder.toString();
// call
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
myClient.execute(get); myClient.execute(get);
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), anyBoolean(), nullable(RequestDetails.class)); // verify
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); BulkExportParameters bp = verifyJobStart();
assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
assertThat(options.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
} }
@Test @Test
public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException { public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException {
when (myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Observation")); // when
String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON) + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition,Observation"); + "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition,Observation");
@ -463,34 +531,34 @@ public class BulkDataExportProviderTest {
CloseableHttpResponse execute = myClient.execute(get); CloseableHttpResponse execute = myClient.execute(get);
String responseBody = IOUtils.toString(execute.getEntity().getContent()); String responseBody = IOUtils.toString(execute.getEntity().getContent());
// verify
assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400))); assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients."))); assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients.")));
} }
@Test @Test
public void testInitiateGroupExportWithNoResourceTypes() throws IOException { public void testInitiateGroupExportWithNoResourceTypes() throws IOException {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // when
.setJobId(A_JOB_ID); when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo); .thenReturn(createJobStartResponse());
String url = "http://localhost:" + myPort + "/" + "Group/123/" +JpaConstants.OPERATION_EXPORT // test
String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON); + "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);
HttpGet get = new HttpGet(url); HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
CloseableHttpResponse execute = myClient.execute(get); CloseableHttpResponse execute = myClient.execute(get);
// verify
assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202))); assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202)));
verifyJobStart();
} }
@Test @Test
public void testInitiateWithPostAndMultipleTypeFilters() throws IOException { public void testInitiateWithPostAndMultipleTypeFilters() throws IOException {
// when
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() when(myJobRunner.startNewJob(any())).thenReturn(createJobStartResponse());
.setJobId(A_JOB_ID);
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo);
InstantType now = InstantType.now();
Parameters input = new Parameters(); Parameters input = new Parameters();
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
@ -499,6 +567,7 @@ public class BulkDataExportProviderTest {
ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input)); post.setEntity(new ResourceEntity(myCtx, input));
@ -511,19 +580,18 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class)); // verify
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); BulkExportParameters bp = verifyJobStart();
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertThat(options.getResourceTypes(), containsInAnyOrder("Patient")); assertThat(bp.getResourceTypes(), containsInAnyOrder("Patient"));
assertThat(options.getFilters(), containsInAnyOrder("Patient?gender=male", "Patient?gender=female")); assertThat(bp.getFilters(), containsInAnyOrder("Patient?gender=male", "Patient?gender=female"));
} }
@Test @Test
public void testInitiatePatientExportRequest() throws IOException { public void testInitiatePatientExportRequest() throws IOException {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // when
.setJobId(A_JOB_ID); when(myJobRunner.startNewJob(any()))
when(myBulkDataExportSvc.submitJob(any(), any(), nullable(RequestDetails.class))).thenReturn(jobInfo); .thenReturn(createJobStartResponse());
when(myBulkDataExportSvc.getPatientCompartmentResources()).thenReturn(Sets.newHashSet("Immunization", "Observation"));
InstantType now = InstantType.now(); InstantType now = InstantType.now();
@ -535,6 +603,7 @@ public class BulkDataExportProviderTest {
ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input)); ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT); HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input)); post.setEntity(new ResourceEntity(myCtx, input));
@ -547,25 +616,28 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
verify(myBulkDataExportSvc, times(1)).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class)); BulkExportParameters bp = verifyJobStart();
BulkDataExportOptions options = myBulkDataExportOptionsCaptor.getValue(); assertEquals(Constants.CT_FHIR_NDJSON, bp.getOutputFormat());
assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat()); assertThat(bp.getResourceTypes(), containsInAnyOrder("Immunization", "Observation"));
assertThat(options.getResourceTypes(), containsInAnyOrder("Immunization", "Observation")); assertThat(bp.getStartDate(), notNullValue());
assertThat(options.getSince(), notNullValue()); assertThat(bp.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo"));
assertThat(options.getFilters(), containsInAnyOrder("Immunization?vaccine-code=foo"));
} }
@Test @Test
public void testProviderProcessesNoCacheHeader() throws IOException { public void testProviderProcessesNoCacheHeader() throws IOException {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() // setup
.setJobId(A_JOB_ID); Batch2JobStartResponse startResponse = createJobStartResponse();
when(myBulkDataExportSvc.submitJob(any(), anyBoolean(), nullable(RequestDetails.class))).thenReturn(jobInfo); startResponse.setUsesCachedResult(true);
// when
when(myJobRunner.startNewJob(any(Batch2BaseJobParameters.class)))
.thenReturn(startResponse);
Parameters input = new Parameters(); Parameters input = new Parameters();
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON)); input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner")); input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner"));
// call
HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT); HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC); post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE); post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE);
@ -578,10 +650,9 @@ public class BulkDataExportProviderTest {
assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()); assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
} }
// verify
verify(myBulkDataExportSvc).submitJob(myBulkDataExportOptionsCaptor.capture(), myBooleanArgumentCaptor.capture(), nullable(RequestDetails.class)); BulkExportParameters parameters = verifyJobStart();
Boolean usedCache = myBooleanArgumentCaptor.getValue(); assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
assertThat(usedCache, is(equalTo(false)));
} }
} }
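Taken together, the assertions in this test class exercise the full mapping from the HTTP $export request onto BulkExportParameters. A condensed, illustrative summary (accessor and constant names as they appear in the tests above):

// JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT  -> params.getOutputFormat()
// JpaConstants.PARAM_EXPORT_TYPE           -> params.getResourceTypes()
// JpaConstants.PARAM_EXPORT_TYPE_FILTER    -> params.getFilters()
// JpaConstants.PARAM_EXPORT_SINCE          -> params.getStartDate()
// Group/<id>/$export + PARAM_EXPORT_MDM    -> params.getGroupId(), params.isExpandMdm()
// Cache-Control: no-cache request header   -> params.isUseExistingJobsFirst() == false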


@ -10,6 +10,7 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository; import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
@ -91,7 +92,8 @@ public class BulkImportR4Test extends BaseJpaR4Test {
// Execute // Execute
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
assertThat(instanceId, not(blankOrNullString())); assertThat(instanceId, not(blankOrNullString()));
ourLog.info("Execution got ID: {}", instanceId); ourLog.info("Execution got ID: {}", instanceId);
@ -144,7 +146,8 @@ public class BulkImportR4Test extends BaseJpaR4Test {
// Execute // Execute
String instanceId = myJobCoordinator.startInstance(request); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = startResponse.getJobId();
assertThat(instanceId, not(blankOrNullString())); assertThat(instanceId, not(blankOrNullString()));
ourLog.info("Execution got ID: {}", instanceId); ourLog.info("Execution got ID: {}", instanceId);
@ -215,8 +218,8 @@ public class BulkImportR4Test extends BaseJpaR4Test {
request.setParameters(parameters); request.setParameters(parameters);
// Execute // Execute
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = myJobCoordinator.startInstance(request); String instanceId = startResponse.getJobId();
assertThat(instanceId, not(blankOrNullString())); assertThat(instanceId, not(blankOrNullString()));
ourLog.info("Execution got ID: {}", instanceId); ourLog.info("Execution got ID: {}", instanceId);
@ -258,8 +261,8 @@ public class BulkImportR4Test extends BaseJpaR4Test {
try { try {
// Execute // Execute
Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(request);
String instanceId = myJobCoordinator.startInstance(request); String instanceId = startResponse.getJobId();
assertThat(instanceId, not(blankOrNullString())); assertThat(instanceId, not(blankOrNullString()));
ourLog.info("Execution got ID: {}", instanceId); ourLog.info("Execution got ID: {}", instanceId);


@ -4,6 +4,7 @@ import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobParameters; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobParameters;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest; import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper; import ca.uhn.fhir.jpa.test.Batch2JobHelper;
@ -63,8 +64,8 @@ public class DeleteExpungeJobTest extends BaseJpaR4Test {
startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE); startRequest.setJobDefinitionId(DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE);
// execute // execute
String jobId = myJobCoordinator.startInstance(startRequest); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
myBatch2JobHelper.awaitJobCompletion(jobId); myBatch2JobHelper.awaitJobCompletion(startResponse);
// validate // validate
assertEquals(1, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); assertEquals(1, myObservationDao.search(SearchParameterMap.newSynchronous()).size());


@ -8,6 +8,7 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test; import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
@ -61,8 +62,8 @@ public class ReindexJobTest extends BaseJpaR4Test {
JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
startRequest.setParameters(parameters); startRequest.setParameters(parameters);
String id = myJobCoordinator.startInstance(startRequest); Batch2JobStartResponse res = myJobCoordinator.startInstance(startRequest);
myBatch2JobHelper.awaitSingleChunkJobCompletion(id); myBatch2JobHelper.awaitSingleChunkJobCompletion(res);
// validate // validate
assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); assertEquals(2, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
@ -94,8 +95,8 @@ public class ReindexJobTest extends BaseJpaR4Test {
JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
startRequest.setParameters(new ReindexJobParameters()); startRequest.setParameters(new ReindexJobParameters());
String id = myJobCoordinator.startInstance(startRequest); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
myBatch2JobHelper.awaitSingleChunkJobCompletion(id); myBatch2JobHelper.awaitSingleChunkJobCompletion(startResponse);
// validate // validate
assertEquals(50, myObservationDao.search(SearchParameterMap.newSynchronous()).size()); assertEquals(50, myObservationDao.search(SearchParameterMap.newSynchronous()).size());
@ -123,8 +124,8 @@ public class ReindexJobTest extends BaseJpaR4Test {
JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
startRequest.setParameters(new ReindexJobParameters()); startRequest.setParameters(new ReindexJobParameters());
String id = myJobCoordinator.startInstance(startRequest); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
JobInstance outcome = myBatch2JobHelper.awaitJobFailure(id); JobInstance outcome = myBatch2JobHelper.awaitJobFailure(startResponse);
// Verify // Verify
@ -151,8 +152,8 @@ public class ReindexJobTest extends BaseJpaR4Test {
JobInstanceStartRequest startRequest = new JobInstanceStartRequest(); JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX); startRequest.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
startRequest.setParameters(new ReindexJobParameters()); startRequest.setParameters(new ReindexJobParameters());
String id = myJobCoordinator.startInstance(startRequest); Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(startRequest);
JobInstance outcome = myBatch2JobHelper.awaitJobFailure(id); JobInstance outcome = myBatch2JobHelper.awaitJobFailure(startResponse);
// Verify // Verify

View File

@ -1,22 +1,20 @@
package ca.uhn.fhir.jpa.interceptor; package ca.uhn.fhir.jpa.interceptor;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.interceptor.api.IInterceptorService; import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test; import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor; import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Binary; import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.Coding; import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
@ -24,11 +22,12 @@ import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -40,19 +39,11 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseTerminologyTranslationInterceptorTest.class);
public static final String TEST_OBV_FILTER = "Observation?status=amended"; public static final String TEST_OBV_FILTER = "Observation?status=amended";
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IInterceptorService myInterceptorBroadcaster;
@Autowired @Autowired
private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor; private ResponseTerminologyTranslationInterceptor myResponseTerminologyTranslationInterceptor;
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataExportJobSchedulingHelper;
@Autowired @Autowired
private FhirContext myFhirContext; private IBatch2JobRunner myJobRunner;
@BeforeEach @BeforeEach
public void beforeEach() { public void beforeEach() {
@ -204,53 +195,42 @@ public class ResponseTerminologyTranslationInterceptorTest extends BaseResourceP
options.setResourceTypes(Sets.newHashSet("Observation")); options.setResourceTypes(Sets.newHashSet("Observation"));
options.setFilters(Sets.newHashSet(TEST_OBV_FILTER)); options.setFilters(Sets.newHashSet(TEST_OBV_FILTER));
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options, true, mySrd); Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
assertNotNull(jobDetails.getJobId());
// Check the status assertNotNull(startResponse);
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_OBV_FILTER), status.getRequest());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
myBulkDataExportJobSchedulingHelper.startSubmittedJobs(); myBatch2JobHelper.awaitJobCompletion(startResponse.getJobId());
awaitAllBulkJobCompletions();
// Fetch the job again await().until(() -> myJobRunner.getJobInfo(startResponse.getJobId()).getReport() != null);
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
// Iterate over the files // Iterate over the files
for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { String report = myJobRunner.getJobInfo(startResponse.getJobId()).getReport();
Binary nextBinary = myBinaryDao.read(next.getResourceId()); BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType()); for (Map.Entry<String, List<String>> file : results.getResourceTypeToBinaryIds().entrySet()) {
String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8); String resourceTypeInFile = file.getKey();
ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents); List<String> binaryIds = file.getValue();
if ("Observation".equals(next.getResourceType())) { assertEquals(1, binaryIds.size());
for (String coding : codingList) { for (String binaryId : binaryIds) {
assertThat(nextContents, containsString(coding)); Binary binary = myBinaryDao.read(new IdType(binaryId));
assertEquals(Constants.CT_FHIR_NDJSON, binary.getContentType());
String contents = new String(binary.getContent(), Constants.CHARSET_UTF8);
ourLog.info("Next contents for type {} :\n{}", binary.getResourceType(), contents);
if ("Observation".equals(resourceTypeInFile)) {
for (String code : codingList) {
assertThat(contents, containsString(code));
}
} else {
fail(resourceTypeInFile);
} }
} else {
fail(next.getResourceType());
} }
} }
assertEquals(1, status.getFiles().size());
} }
@Nonnull @Nonnull
private List<String> toCodeStrings(Observation observation) { private List<String> toCodeStrings(Observation observation) {
return observation.getCode().getCoding().stream().map(t -> "[system=" + t.getSystem() + ", code=" + t.getCode() + ", display=" + t.getDisplay() + "]").collect(Collectors.toList()); return observation.getCode().getCoding().stream().map(t -> "[system=" + t.getSystem() + ", code=" + t.getCode() + ", display=" + t.getDisplay() + "]").collect(Collectors.toList());
} }
private void awaitAllBulkJobCompletions() {
myBatchJobHelper.awaitAllBulkJobCompletions(
BatchConstants.BULK_EXPORT_JOB_NAME,
BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME,
BatchConstants.GROUP_BULK_EXPORT_JOB_NAME,
BatchConstants.DELETE_EXPUNGE_JOB_NAME,
BatchConstants.MDM_CLEAR_JOB_NAME
);
}
} }
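The report read back in the test above is the serialized BulkExportJobResults produced by the new batch2 export job. A small sketch of how such a report is built and parsed with the same utilities the test uses (the literal resource and Binary IDs are illustrative only):
BulkExportJobResults results = new BulkExportJobResults();
Map<String, List<String>> typeToBinaries = new HashMap<>();
typeToBinaries.put("Observation", Arrays.asList("Binary/1")); // illustrative IDs
results.setResourceTypeToBinaryIds(typeToBinaries);
// the job stores the report as JSON...
String report = JsonUtil.serialize(results);
// ...and consumers deserialize it before resolving each Binary
BulkExportJobResults parsed = JsonUtil.deserialize(report, BulkExportJobResults.class);
List<String> observationBinaries = parsed.getResourceTypeToBinaryIds().get("Observation");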

View File

@ -1,8 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4; package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest; import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor; import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
@ -14,7 +12,6 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor; import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException; import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException; import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
@ -25,10 +22,7 @@ import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRuleTester;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum; import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder; import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.rest.server.provider.ProviderConstants; import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.util.UrlUtil;
import com.github.jsonldjava.shaded.com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpDelete;
@ -61,7 +55,6 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
@ -80,9 +73,6 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptorJpaR4Test.class); private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptorJpaR4Test.class);
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired @Autowired
private SearchParamMatcher mySearchParamMatcher; private SearchParamMatcher mySearchParamMatcher;
@ -103,297 +93,6 @@ public class AuthorizationInterceptorJpaR4Test extends BaseResourceProviderR4Tes
myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof AuthorizationInterceptor); myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof AuthorizationInterceptor);
} }
@Test
public void testBulkExport_AuthorizeGroupId() {
AuthorizationInterceptor authInterceptor = new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().bulkExport().groupExportOnGroup(new IdType("Group/123")).andThen()
.allow().bulkExport().groupExportOnGroup(new IdType("Group/789")).andThen()
.build();
}
};
myInterceptorRegistry.registerInterceptor(authInterceptor);
/*
* Matching group ID
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/789"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Second matching group ID
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/789"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Non matching group ID
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
/*
* Non group export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
}
@Test
public void testBulkExport_AuthorizePatientId() {
AuthorizationInterceptor authInterceptor = new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().bulkExport().patientExportOnGroup(new IdType("Group/123")).andThen()
.build();
}
};
myInterceptorRegistry.registerInterceptor(authInterceptor);
/*
* Matching group ID
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/123"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Non matching group ID
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
/*
* Non group export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
}
@Test
public void testBulkExport_AuthorizeSystem() {
AuthorizationInterceptor authInterceptor = new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().bulkExport().systemExport().andThen()
.build();
}
};
myInterceptorRegistry.registerInterceptor(authInterceptor);
/*
* System export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Patient export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
}
@Test
public void testBulkExport_AuthorizeAny() {
AuthorizationInterceptor authInterceptor = new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().bulkExport().any().andThen()
.build();
}
};
myInterceptorRegistry.registerInterceptor(authInterceptor);
/*
* System export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Patient export
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setGroupId(new IdType("Group/456"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.PATIENT);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
}
@Test
public void testBulkExport_SpecificResourceTypesEnforced() {
AuthorizationInterceptor authInterceptor = new AuthorizationInterceptor(PolicyEnum.DENY) {
@Override
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new RuleBuilder()
.allow().bulkExport().systemExport().withResourceTypes(Lists.newArrayList("Patient", "Encounter")).andThen()
.build();
}
};
myInterceptorRegistry.registerInterceptor(authInterceptor);
/*
* Appropriate Resources
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Patient", "Encounter"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
assertEquals(BulkExportJobStatusEnum.SUBMITTED, jobDetails.getStatus());
}
/*
* Inappropriate Resources
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setResourceTypes(Sets.newHashSet("Patient", "Encounter", "Observation"));
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
/*
* No Resources
*/
{
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
try {
ServletRequestDetails requestDetails = new ServletRequestDetails().setServletRequest(new MockHttpServletRequest());
myBulkDataExportSvc.submitJob(bulkDataExportOptions, true, requestDetails);
fail();
} catch (ForbiddenOperationException e) {
// good
}
}
}
/** /**
* See #667 * See #667
*/ */

View File

@ -1,9 +1,12 @@
package ca.uhn.fhir.jpa.provider.r4; package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.i18n.Msg; import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.batch.config.BatchConstants; import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.entity.PartitionEntity;
@ -11,11 +14,14 @@ import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails; import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions; import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException; import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.ITestDataBuilder; import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.JsonUtil; import ca.uhn.fhir.util.JsonUtil;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
@ -24,37 +30,51 @@ import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CapabilityStatement; import org.hl7.fhir.r4.model.CapabilityStatement;
import org.hl7.fhir.r4.model.Condition; import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.IdType; import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Organization; import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient; import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Date; import java.util.Date;
import java.util.HashMap;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@SuppressWarnings("Duplicates") @SuppressWarnings("Duplicates")
public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Test implements ITestDataBuilder { public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Test implements ITestDataBuilder {
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired @Autowired
private IBulkDataExportJobSchedulingHelper myBulkDataExportJobSchedulingHelper; private IBulkDataExportJobSchedulingHelper myBulkDataExportJobSchedulingHelper;
@ -587,56 +607,6 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
assertThat(response.getEntry(), hasSize(2)); assertThat(response.getEntry(), hasSize(2));
} }
@Test
public void testBulkExportForDifferentPartitions() throws IOException {
setBulkDataExportProvider();
testBulkExport(TENANT_A);
testBulkExport(TENANT_B);
testBulkExport(JpaConstants.DEFAULT_PARTITION_NAME);
}
private void testBulkExport(String createInPartition) throws IOException {
// Create a patient
IBaseResource patientA = buildPatient(withActiveTrue());
SystemRequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setTenantId(createInPartition);
myPatientDao.create((Patient) patientA, requestDetails);
// Create a bulk job
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(options, false, requestDetails);
assertNotNull(jobDetails.getJobId());
// Run a scheduled pass to build the export and wait for completion
myBulkDataExportJobSchedulingHelper.startSubmittedJobs();
myBatchJobHelper.awaitAllBulkJobCompletions(
BatchConstants.BULK_EXPORT_JOB_NAME
);
//perform export-poll-status
HttpGet get = new HttpGet(buildExportUrl(createInPartition, jobDetails.getJobId()));
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
BulkExportResponseJson responseJson = JsonUtil.deserialize(responseString, BulkExportResponseJson.class);
assertThat(responseJson.getOutput().get(0).getUrl(), containsString(JpaConstants.DEFAULT_PARTITION_NAME + "/Binary/"));
}
}
private void setBulkDataExportProvider() {
BulkDataExportProvider provider = new BulkDataExportProvider();
provider.setBulkDataExportSvcForUnitTests(myBulkDataExportSvc);
provider.setFhirContextForUnitTest(myFhirContext);
ourRestServer.registerProvider(provider);
}
private String buildExportUrl(String createInPartition, String jobId) {
return myClient.getServerBase() + "/" + createInPartition + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?"
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + jobId;
}
private void createConditionWithAllowedUnqualified(IIdType idA) { private void createConditionWithAllowedUnqualified(IIdType idA) {
myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED); myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
IIdType idB = createResource("Condition", withTenant(TENANT_A), withObservationCode("http://cs", "A")); IIdType idB = createResource("Condition", withTenant(TENANT_A), withObservationCode("http://cs", "A"));
@ -644,4 +614,89 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
theCondition.getSubject().setReference("Patient/" + idA.getIdPart()); theCondition.getSubject().setReference("Patient/" + idA.getIdPart());
doUpdateResource(theCondition); doUpdateResource(theCondition);
} }
@Nested
public class PartitionTesting {
@InjectMocks
private BulkDataExportProvider myProvider;
@Mock
private IBatch2JobRunner myJobRunner;
@Test
public void testBulkExportForDifferentPartitions() throws IOException {
setBulkDataExportProvider();
testBulkExport(TENANT_A);
testBulkExport(TENANT_B);
testBulkExport(JpaConstants.DEFAULT_PARTITION_NAME);
}
private void testBulkExport(String createInPartition) throws IOException {
// setup
String jobId = "jobId";
RestfulServer mockServer = mock(RestfulServer.class);
HttpServletResponse mockResponse = mock(HttpServletResponse.class);
BulkExportJobResults results = new BulkExportJobResults();
HashMap<String, List<String>> map = new HashMap<>();
map.put("Patient", Arrays.asList("Binary/1", "Binary/2"));
results.setResourceTypeToBinaryIds(map);
Batch2JobInfo jobInfo = new Batch2JobInfo();
jobInfo.setJobId(jobId);
jobInfo.setStatus(BulkExportJobStatusEnum.COMPLETE);
jobInfo.setReport(JsonUtil.serialize(results));
// Create a bulk job
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Sets.newHashSet("Patient"));
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
Batch2JobStartResponse startResponse = new Batch2JobStartResponse();
startResponse.setJobId(jobId);
when(myJobRunner.startNewJob(any()))
.thenReturn(startResponse);
when(myJobRunner.getJobInfo(anyString()))
.thenReturn(jobInfo);
// mocking
ServletRequestDetails servletRequestDetails = spy(new ServletRequestDetails());
MockHttpServletRequest reqDetails = new MockHttpServletRequest();
reqDetails.addHeader(Constants.HEADER_PREFER,
"respond-async");
servletRequestDetails.setServletRequest(reqDetails);
doReturn(JpaConstants.DEFAULT_PARTITION_NAME + "/")
.when(servletRequestDetails).getServerBaseForRequest();
when(servletRequestDetails.getServer())
.thenReturn(mockServer);
when(servletRequestDetails.getServletResponse())
.thenReturn(mockResponse);
List<IPrimitiveType<String>> filters = new ArrayList<>();
if (options.getFilters() != null) {
for (String v : options.getFilters()) {
filters.add(new StringType(v));
}
}
//perform export-poll-status
HttpGet get = new HttpGet(buildExportUrl(createInPartition, jobId));
try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
BulkExportResponseJson responseJson = JsonUtil.deserialize(responseString, BulkExportResponseJson.class);
assertThat(responseJson.getOutput().get(0).getUrl(), containsString(JpaConstants.DEFAULT_PARTITION_NAME + "/Binary/"));
}
}
@BeforeEach
void setBulkDataExportProvider() { // JUnit 5 lifecycle methods must not be private
ourRestServer.registerProvider(myProvider);
}
private String buildExportUrl(String createInPartition, String jobId) {
return myClient.getServerBase() + "/" + createInPartition + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?"
+ JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + jobId;
}
}
} }
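As a reading aid, buildExportUrl(...) above assembles the per-tenant polling endpoint from the JpaConstants shown. Assuming the usual values of OPERATION_EXPORT_POLL_STATUS and PARAM_EXPORT_POLL_STATUS_JOB_ID, the resulting URL has roughly this shape (host, tenant and job ID illustrative):
http://localhost:8000/TENANT-A/$export-poll-status?_jobId=jobId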

View File

@ -0,0 +1,63 @@
package ca.uhn.fhir.jpa.testutil;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import ca.uhn.fhir.util.SearchParameterUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class BulkExportBatch2TestUtils {
public static BulkExportParameters getBulkExportParametersFromOptions(FhirContext theCtx,
BulkDataExportOptions theOptions) {
BulkExportParameters parameters = new BulkExportParameters(Batch2JobDefinitionConstants.BULK_EXPORT);
parameters.setStartDate(theOptions.getSince());
parameters.setOutputFormat(theOptions.getOutputFormat());
parameters.setExportStyle(theOptions.getExportStyle());
if (theOptions.getFilters() != null) {
parameters.setFilters(new ArrayList<>(theOptions.getFilters()));
}
if (theOptions.getGroupId() != null) {
parameters.setGroupId(theOptions.getGroupId().getValue());
}
parameters.setExpandMdm(theOptions.isExpandMdm());
// resource types are special
// if none are provided, the job submitter adds them
// but we cannot manually start the job without correct parameters
// so we "correct" them here
if (theOptions.getResourceTypes() == null || theOptions.getResourceTypes().isEmpty()) {
addAllResourceTypes(parameters, theCtx);
}
else {
parameters.setResourceTypes(new ArrayList<>(theOptions.getResourceTypes()));
}
return parameters;
}
private static void addAllResourceTypes(BulkExportParameters theOptions, FhirContext theCtx) {
Set<String> rts = theCtx.getResourceTypes();
if (theOptions.getExportStyle() == BulkDataExportOptions.ExportStyle.SYSTEM) {
// everything
List<String> resourceTypes = rts.stream()
.filter(rt -> !rt.equalsIgnoreCase("Binary"))
.collect(Collectors.toList());
theOptions.setResourceTypes(resourceTypes);
}
else if (theOptions.getExportStyle() != null) {
// patients
List<String> patientRts = rts.stream()
.filter(rt -> SearchParameterUtil.isResourceTypeInPatientCompartment(theCtx, rt))
.collect(Collectors.toList());
theOptions.setResourceTypes(patientRts);
}
}
}
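A minimal usage sketch for this helper, assuming a test class with an autowired FhirContext (myFhirContext) and IBatch2JobRunner (myJobRunner) as in the other updated tests:
BulkDataExportOptions options = new BulkDataExportOptions();
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
options.setResourceTypes(Sets.newHashSet("Patient", "Observation"));
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
// convert the legacy-style options into batch2 job parameters...
BulkExportParameters parameters = BulkExportBatch2TestUtils.getBulkExportParametersFromOptions(myFhirContext, options);
// ...and hand them to the batch2 runner
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(parameters);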

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>

View File

@ -7,7 +7,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -35,6 +35,7 @@ public class BulkDataExportOptions {
GROUP, GROUP,
SYSTEM SYSTEM
} }
private String myOutputFormat; private String myOutputFormat;
private Set<String> myResourceTypes; private Set<String> myResourceTypes;
private Date mySince; private Date mySince;
@ -43,8 +44,6 @@ public class BulkDataExportOptions {
private boolean myExpandMdm; private boolean myExpandMdm;
private IIdType myGroupId; private IIdType myGroupId;
public void setOutputFormat(String theOutputFormat) { public void setOutputFormat(String theOutputFormat) {
myOutputFormat = theOutputFormat; myOutputFormat = theOutputFormat;
} }

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId> <artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId> <artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId> <artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId> <artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
</parent> </parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId> <artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId> <artifactId>hapi-fhir</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
@ -41,47 +41,6 @@
</plugin> </plugin>
</plugins> </plugins>
</build> </build>
<profiles>
<!-- For releases, we need to generate javadoc and sources JAR -->
<profile>
<id>release</id>
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>deployToSonatype</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.4.0</version>
<executions>
<execution>
<id>attach-javadoc</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project> </project>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>6.1.0-PRE14-SNAPSHOT</version> <version>6.1.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -20,6 +20,7 @@ package ca.uhn.fhir.batch2.jobs.config;
* #L% * #L%
*/ */
import ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx; import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx;
import ca.uhn.fhir.batch2.jobs.imprt.BulkImportAppCtx; import ca.uhn.fhir.batch2.jobs.imprt.BulkImportAppCtx;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx; import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
@ -32,8 +33,8 @@ import org.springframework.context.annotation.Import;
BatchCommonCtx.class, BatchCommonCtx.class,
BulkImportAppCtx.class, BulkImportAppCtx.class,
ReindexAppCtx.class, ReindexAppCtx.class,
DeleteExpungeAppCtx.class DeleteExpungeAppCtx.class,
BulkExportAppCtx.class
}) })
public class Batch2JobsConfig { public class Batch2JobsConfig {
// nothing
} }

View File

@ -1,6 +1,28 @@
package ca.uhn.fhir.batch2.jobs.config; package ca.uhn.fhir.batch2.jobs.config;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner; import ca.uhn.fhir.batch2.jobs.parameters.UrlPartitioner;
import ca.uhn.fhir.batch2.jobs.services.Batch2JobRunnerImpl;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
@ -10,4 +32,9 @@ public class BatchCommonCtx {
UrlPartitioner urlPartitioner(MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) { UrlPartitioner urlPartitioner(MatchUrlService theMatchUrlService, IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc); return new UrlPartitioner(theMatchUrlService, theRequestPartitionHelperSvc);
} }
@Bean
public IBatch2JobRunner batch2JobRunner() {
return new Batch2JobRunnerImpl();
}
} }
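The new bean makes the batch2-backed runner injectable wherever the legacy export service used to be wired. A hedged sketch of a consumer, with the method name illustrative and field names following the updated tests:
@Autowired
private IBatch2JobRunner myJobRunner;
public Batch2JobInfo startAndInspect(BulkExportParameters theParameters) {
	// start the job through the shared runner bean...
	Batch2JobStartResponse response = myJobRunner.startNewJob(theParameters);
	// ...then poll the same runner for status and, eventually, the report
	return myJobRunner.getJobInfo(response.getJobId());
}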

View File

@ -0,0 +1,108 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportExpandedResources;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportIdList;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.JobDefinition;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
@Configuration
public class BulkExportAppCtx {
@Bean
public JobDefinition bulkExportJobDefinition() {
JobDefinition.Builder<IModelJson, VoidModel> builder = JobDefinition.newBuilder();
builder.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
builder.setJobDescription("FHIR Bulk Export");
builder.setJobDefinitionVersion(1);
JobDefinition def = builder.setParametersType(BulkExportJobParameters.class)
// validator
.setParametersValidator(bulkExportJobParametersValidator())
.gatedExecution()
// first step - load in (all) ids and create id chunks of 1000 each
.addFirstStep(
"fetch-resources",
"Fetches resource PIDs for exporting",
BulkExportIdList.class,
fetchResourceIdsStep()
)
// expand out - fetch resources
.addIntermediateStep(
"expand-resources",
"Expand out resources",
BulkExportExpandedResources.class,
expandResourcesStep()
)
// write binaries and save to db
.addIntermediateStep(
"write-to-binaries",
"Writes the expanded resources to the binaries and saves",
BulkExportBinaryFileId.class,
writeBinaryStep()
)
// finalize the job (set to complete)
.addFinalReducerStep(
"create-report-step",
"Creates the output report from a bulk export job",
BulkExportJobResults.class,
createReportStep()
)
.build();
return def;
}
@Bean
public BulkExportJobParametersValidator bulkExportJobParametersValidator() {
return new BulkExportJobParametersValidator();
}
@Bean
public FetchResourceIdsStep fetchResourceIdsStep() {
return new FetchResourceIdsStep();
}
@Bean
public ExpandResourcesStep expandResourcesStep() {
return new ExpandResourcesStep();
}
@Bean
public WriteBinaryStep writeBinaryStep() {
return new WriteBinaryStep();
}
@Bean
@Scope("prototype")
public BulkExportCreateReportStep createReportStep() {
return new BulkExportCreateReportStep();
}
}
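Because the definition above ends in a reducer step, it is registered with gatedExecution(). A minimal sketch of starting an instance of this definition through the coordinator, with parameter population elided:
JobInstanceStartRequest request = new JobInstanceStartRequest();
request.setJobDefinitionId(Batch2JobDefinitionConstants.BULK_EXPORT);
BulkExportJobParameters parameters = new BulkExportJobParameters();
// resource types, export style, output format etc. would be populated here
request.setParameters(parameters);
Batch2JobStartResponse response = myJobCoordinator.startInstance(request);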

View File

@ -0,0 +1,87 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.ChunkExecutionDetails;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IReductionStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.model.ChunkOutcome;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.slf4j.LoggerFactory.getLogger;
public class BulkExportCreateReportStep implements IReductionStepWorker<BulkExportJobParameters, BulkExportBinaryFileId, BulkExportJobResults> {
private static final Logger ourLog = getLogger(BulkExportCreateReportStep.class);
private Map<String, List<String>> myResourceToBinaryIds;
@NotNull
@Override
public RunOutcome run(@NotNull StepExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theStepExecutionDetails,
@NotNull IJobDataSink<BulkExportJobResults> theDataSink) throws JobExecutionFailedException {
BulkExportJobResults results = new BulkExportJobResults();
if (myResourceToBinaryIds != null) {
ourLog.info("Bulk Export Report creation step");
results.setResourceTypeToBinaryIds(myResourceToBinaryIds);
myResourceToBinaryIds = null;
} else {
String msg = "Export complete, but no data to generate report.";
ourLog.warn(msg);
results.setReportMsg(msg);
}
// accept saves the report
theDataSink.accept(results);
return RunOutcome.SUCCESS;
}
@NotNull
@Override
public ChunkOutcome consume(ChunkExecutionDetails<BulkExportJobParameters,
BulkExportBinaryFileId> theChunkDetails) {
BulkExportBinaryFileId fileId = theChunkDetails.getData();
if (myResourceToBinaryIds == null) {
myResourceToBinaryIds = new HashMap<>();
}
myResourceToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
myResourceToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
return ChunkOutcome.SUCCESS();
}
}
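Conceptually, this reducer folds the (resourceType, binaryId) pairs received by consume(...) into a single map that run(...) publishes as the job report. A standalone sketch of that aggregation; the completedChunks collection is an illustrative stand-in, not part of the diff:
Map<String, List<String>> resourceTypeToBinaryIds = new HashMap<>();
// consume(...) is invoked once per chunk emitted by the write-to-binaries step
for (BulkExportBinaryFileId fileId : completedChunks) { // completedChunks: illustrative stand-in
	resourceTypeToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
	resourceTypeToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
}
// run(...) then wraps the accumulated map in the job-level report object
BulkExportJobResults results = new BulkExportJobResults();
results.setResourceTypeToBinaryIds(resourceTypeToBinaryIds);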

View File

@ -0,0 +1,86 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IJobParametersValidator;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
public class BulkExportJobParametersValidator implements IJobParametersValidator<BulkExportJobParameters> {
@Autowired
private DaoRegistry myDaoRegistry;
@Nullable
@Override
public List<String> validate(@Nonnull BulkExportJobParameters theParameters) {
List<String> errorMsgs = new ArrayList<>();
// initial validation
if (theParameters.getResourceTypes() == null || theParameters.getResourceTypes().isEmpty()) {
errorMsgs.add("Resource Types are required for an export job.");
}
else {
for (String resourceType : theParameters.getResourceTypes()) {
if (resourceType.equalsIgnoreCase("Binary")) {
errorMsgs.add("Bulk export of Binary resources is forbidden");
} else if (!myDaoRegistry.isResourceTypeSupported(resourceType)) {
errorMsgs.add("Resource type " + resourceType + " is not a supported resource type!");
}
}
}
// validate the output format
if (!Constants.CT_FHIR_NDJSON.equalsIgnoreCase(theParameters.getOutputFormat())) {
errorMsgs.add("The only allowed format for Bulk Export is currently " + Constants.CT_FHIR_NDJSON);
}
// validate for group
BulkDataExportOptions.ExportStyle style = theParameters.getExportStyle();
if (style == null) {
errorMsgs.add("Export style is required");
}
else {
switch (style) {
case GROUP:
if (theParameters.getGroupId() == null || theParameters.getGroupId().isEmpty()) {
errorMsgs.add("Group export requires a group id, but none provided.");
}
break;
case SYSTEM:
case PATIENT:
default:
break;
}
}
return errorMsgs;
}
}
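For reference, a parameter set that should pass this validator, assuming BulkExportJobParameters exposes setters mirroring the getters used above and that validator is an initialized instance of this class:
BulkExportJobParameters parameters = new BulkExportJobParameters();
parameters.setResourceTypes(Arrays.asList("Patient", "Observation")); // non-empty, no Binary
parameters.setOutputFormat(Constants.CT_FHIR_NDJSON);                 // only accepted output format
parameters.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
parameters.setGroupId("Group/123");                                   // required when style is GROUP
List<String> errors = validator.validate(parameters);                 // expected: empty list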

View File

@ -0,0 +1,161 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportExpandedResources;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportIdList;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.models.Id;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationSvc;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.slf4j.LoggerFactory.getLogger;
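/**
 * Second step of the bulk export job: loads the resources for the IDs produced by
 * FetchResourceIdsStep, optionally applies MDM expansion and terminology translation,
 * and emits the resources serialized as JSON strings ready for NDJSON writing.
 */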
public class ExpandResourcesStep implements IJobStepWorker<BulkExportJobParameters, BulkExportIdList, BulkExportExpandedResources> {
private static final Logger ourLog = getLogger(ExpandResourcesStep.class);
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private FhirContext myFhirContext;
@Autowired
private IBulkExportProcessor myBulkExportProcessor;
@Autowired
private ApplicationContext myApplicationContext;
private ResponseTerminologyTranslationSvc myResponseTerminologyTranslationSvc;
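/**
 * Looks up an optional ResponseTerminologyTranslationSvc bean without forcing eager
 * initialization of lazily-defined beans; if none is available, bulk export simply
 * runs without terminology translation.
 */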
@PostConstruct
public void initialize() {
boolean includeNonSingletons = false;
// We don't want to init lazy loaded beans here, so we call getBeansOfType with allowEagerInit = false
boolean allowEagerInit = false;
try {
Map<String, ResponseTerminologyTranslationSvc> rttMap = myApplicationContext.getBeansOfType(ResponseTerminologyTranslationSvc.class, includeNonSingletons, allowEagerInit);
Optional<ResponseTerminologyTranslationSvc> oRtt = rttMap.values().stream().findFirst();
if (oRtt.isPresent()) {
myResponseTerminologyTranslationSvc = oRtt.get();
}
} catch (Exception e) {
ourLog.info("Terminology Translation not available on this server. Bulk Export will operate without it.");
}
}
@Nonnull
@Override
public RunOutcome run(@Nonnull StepExecutionDetails<BulkExportJobParameters, BulkExportIdList> theStepExecutionDetails,
@Nonnull IJobDataSink<BulkExportExpandedResources> theDataSink) throws JobExecutionFailedException {
BulkExportIdList idList = theStepExecutionDetails.getData();
BulkExportJobParameters jobParameters = theStepExecutionDetails.getParameters();
ourLog.info("Step 2 for bulk export - Expand resources");
// search the resources
IBundleProvider bundle = fetchAllResources(idList);
List<IBaseResource> allResources = bundle.getAllResources();
// if necessary, expand resources
if (jobParameters.isExpandMdm()) {
myBulkExportProcessor.expandMdmResources(allResources);
}
if (myResponseTerminologyTranslationSvc != null) {
myResponseTerminologyTranslationSvc.processResourcesForTerminologyTranslation(allResources);
}
// encode them
List<String> resources = encodeToString(allResources, jobParameters);
// set to datasink
BulkExportExpandedResources output = new BulkExportExpandedResources();
output.setStringifiedResources(resources);
output.setResourceType(idList.getResourceType());
theDataSink.accept(output);
ourLog.trace("Expanding of {} resources of type {} completed",
idList.getIds().size(),
idList.getResourceType());
// and return
return RunOutcome.SUCCESS;
}
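/**
 * Loads the full resources for the given IDs with a single _id search against the
 * resource type's DAO, across all partitions.
 */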
private IBundleProvider fetchAllResources(BulkExportIdList theIds) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theIds.getResourceType());
SearchParameterMap map = new SearchParameterMap();
TokenOrListParam ids = new TokenOrListParam();
for (Id id : theIds.getIds()) {
ids.addOr(new TokenParam(id.toPID().getAssociatedResourceId().getValue()));
}
map.add(Constants.PARAM_ID, ids);
return dao.search(map, SystemRequestDetails.forAllPartitions());
}
private List<String> encodeToString(List<IBaseResource> theResources, BulkExportJobParameters theParameters) {
IParser parser = getParser(theParameters);
List<String> resources = new ArrayList<>();
for (IBaseResource resource : theResources) {
String jsonResource = parser.encodeResourceToString(resource);
resources.add(jsonResource);
}
return resources;
}
private IParser getParser(BulkExportJobParameters theParameters) {
// The parser depends on the requested output format,
// but for now only NDJSON is supported (see WriteBinaryStep as well).
return myFhirContext.newJsonParser().setPrettyPrint(false);
}
}

View File

@ -0,0 +1,120 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IFirstJobStepWorker;
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportIdList;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.batch2.jobs.models.Id;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
import ca.uhn.fhir.jpa.bulk.export.model.ExportPIDIteratorParameters;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
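/**
 * First step of the bulk export job: resolves the persistent IDs of every matching
 * resource, per resource type, and submits them downstream in work chunks of at most
 * MAX_IDS_TO_BATCH IDs each.
 */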
public class FetchResourceIdsStep implements IFirstJobStepWorker<BulkExportJobParameters, BulkExportIdList> {
private static final Logger ourLog = LoggerFactory.getLogger(FetchResourceIdsStep.class);
public static final int MAX_IDS_TO_BATCH = 1000;
@Autowired
private IBulkExportProcessor myBulkExportProcessor;
@Nonnull
@Override
public RunOutcome run(@Nonnull StepExecutionDetails<BulkExportJobParameters, VoidModel> theStepExecutionDetails,
@Nonnull IJobDataSink<BulkExportIdList> theDataSink) throws JobExecutionFailedException {
BulkExportJobParameters params = theStepExecutionDetails.getParameters();
ourLog.info("Starting BatchExport job");
ExportPIDIteratorParameters providerParams = new ExportPIDIteratorParameters();
providerParams.setFilters(params.getFilters());
providerParams.setStartDate(params.getStartDate());
providerParams.setExportStyle(params.getExportStyle());
providerParams.setGroupId(params.getGroupId());
providerParams.setExpandMdm(params.isExpandMdm());
int submissionCount = 0;
try {
for (String resourceType : params.getResourceTypes()) {
providerParams.setResourceType(resourceType);
// the job's filters are applied when building the resource PID iterator
Iterator<ResourcePersistentId> pidIterator = myBulkExportProcessor.getResourcePidIterator(providerParams);
List<Id> idsToSubmit = new ArrayList<>();
while (pidIterator.hasNext()) {
ResourcePersistentId pid = pidIterator.next();
idsToSubmit.add(Id.getIdFromPID(pid, resourceType));
// >= so that we know (with confidence)
// that every batch is <= 1000 items
if (idsToSubmit.size() >= MAX_IDS_TO_BATCH) {
submitWorkChunk(idsToSubmit, resourceType, params, theDataSink);
submissionCount++;
idsToSubmit = new ArrayList<>();
}
}
// if we have any other Ids left, submit them now
if (!idsToSubmit.isEmpty()) {
submitWorkChunk(idsToSubmit, resourceType, params, theDataSink);
submissionCount++;
idsToSubmit = new ArrayList<>();
}
}
} catch (Exception ex) {
ourLog.error(ex.getMessage(), ex);
theDataSink.recoveredError(ex.getMessage());
throw new JobExecutionFailedException(Msg.code(2104) + " : " + ex.getMessage());
}
ourLog.info("Submitted {} groups of ids for processing", submissionCount);
return RunOutcome.SUCCESS;
}
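/**
 * Packages the collected IDs for one resource type into a BulkExportIdList and hands
 * it to the data sink as a new work chunk.
 */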
private void submitWorkChunk(List<Id> theIds,
String theResourceType,
BulkExportJobParameters theParams,
IJobDataSink<BulkExportIdList> theDataSink) {
BulkExportIdList idList = new BulkExportIdList();
idList.setIds(theIds);
idList.setResourceType(theResourceType);
theDataSink.accept(idList);
}
}

View File

@ -0,0 +1,125 @@
package ca.uhn.fhir.batch2.jobs.export;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.api.IJobDataSink;
import ca.uhn.fhir.batch2.api.IJobStepWorker;
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
import ca.uhn.fhir.batch2.api.RunOutcome;
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportExpandedResources;
import ca.uhn.fhir.batch2.jobs.export.models.BulkExportJobParameters;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.BinaryUtil;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import static org.slf4j.LoggerFactory.getLogger;
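/**
 * Third step of the bulk export job: writes each chunk of stringified resources into
 * a newline-delimited (NDJSON) Binary resource and passes the new Binary's ID on to
 * the report-creation reduction step.
 */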
public class WriteBinaryStep implements IJobStepWorker<BulkExportJobParameters, BulkExportExpandedResources, BulkExportBinaryFileId> {
private static final Logger ourLog = getLogger(WriteBinaryStep.class);
@Autowired
private FhirContext myFhirContext;
@Autowired
private DaoRegistry myDaoRegistry;
@Nonnull
@Override
public RunOutcome run(@Nonnull StepExecutionDetails<BulkExportJobParameters, BulkExportExpandedResources> theStepExecutionDetails,
@Nonnull IJobDataSink<BulkExportBinaryFileId> theDataSink) throws JobExecutionFailedException {
BulkExportExpandedResources expandedResources = theStepExecutionDetails.getData();
ourLog.info("Write binary step of Job Export");
@SuppressWarnings("unchecked")
IFhirResourceDao<IBaseBinary> binaryDao = myDaoRegistry.getResourceDao("Binary");
IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
// TODO
// should be dependent on the
// output format in parameters
// but for now, only NDJSON is supported
binary.setContentType(Constants.CT_FHIR_NDJSON);
int processedRecordsCount = 0;
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
try (OutputStreamWriter streamWriter = getStreamWriter(outputStream)) {
for (String stringified : expandedResources.getStringifiedResources()) {
streamWriter.append(stringified);
streamWriter.append("\n");
processedRecordsCount++;
}
streamWriter.flush();
outputStream.flush();
}
binary.setContent(outputStream.toByteArray());
} catch (IOException ex) {
String errorMsg = String.format("Failure to process resource of type %s : %s",
expandedResources.getResourceType(),
ex.getMessage());
ourLog.error(errorMsg);
throw new JobExecutionFailedException(Msg.code(2105) + errorMsg);
}
DaoMethodOutcome outcome = binaryDao.create(binary,
new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition()));
IIdType id = outcome.getId();
BulkExportBinaryFileId bulkExportBinaryFileId = new BulkExportBinaryFileId();
bulkExportBinaryFileId.setBinaryId(id.getValueAsString());
bulkExportBinaryFileId.setResourceType(expandedResources.getResourceType());
theDataSink.accept(bulkExportBinaryFileId);
ourLog.info("Binary writing complete for {} resources of type {}.",
processedRecordsCount,
expandedResources.getResourceType());
return RunOutcome.SUCCESS;
}
/**
* Returns an output stream writer
* (exposed for testing)
*/
protected OutputStreamWriter getStreamWriter(ByteArrayOutputStream theOutputStream) {
return new OutputStreamWriter(theOutputStream, Constants.CHARSET_UTF8);
}
}

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.batch2.jobs.export.models;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.annotation.JsonProperty;
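/**
 * Serializable work-chunk payload linking a generated Binary resource ID to the
 * resource type it contains; produced by WriteBinaryStep and consumed by the
 * report-creation reduction step.
 */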
public class BulkExportBinaryFileId extends BulkExportJobBase {
@JsonProperty("binaryId")
private String myBinaryId;
@JsonProperty("resourceType")
private String myResourceType;
public BulkExportBinaryFileId() {
}
public String getBinaryId() {
return myBinaryId;
}
public void setBinaryId(String theBinaryId) {
myBinaryId = theBinaryId;
}
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
}

View File

@ -0,0 +1,57 @@
package ca.uhn.fhir.batch2.jobs.export.models;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
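/**
 * Serializable work-chunk payload carrying the stringified resources of a single
 * resource type from ExpandResourcesStep to WriteBinaryStep.
 */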
public class BulkExportExpandedResources extends BulkExportJobBase {
/**
* List of stringified resources ready for writing
* to a file/binary.
*/
@JsonProperty("resources")
private List<String> myStringifiedResources;
/**
* only a single resource type per batch step
*/
@JsonProperty("resourceType")
private String myResourceType;
public List<String> getStringifiedResources() {
return myStringifiedResources;
}
public void setStringifiedResources(List<String> theStringifiedResources) {
myStringifiedResources = theStringifiedResources;
}
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
}

View File

@ -0,0 +1,54 @@
package ca.uhn.fhir.batch2.jobs.export.models;
/*-
* #%L
* hapi-fhir-storage-batch2-jobs
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.batch2.jobs.models.Id;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
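/**
 * Serializable work-chunk payload carrying a batch of resource IDs of a single
 * resource type from FetchResourceIdsStep to ExpandResourcesStep.
 */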
public class BulkExportIdList extends BulkExportJobBase {
/**
* List of Id objects for serialization
*/
@JsonProperty("ids")
private List<Id> myIds;
@JsonProperty("resourceType")
private String myResourceType;
public List<Id> getIds() {
return myIds;
}
public void setIds(List<Id> theIds) {
myIds = theIds;
}
public String getResourceType() {
return myResourceType;
}
public void setResourceType(String theResourceType) {
myResourceType = theResourceType;
}
}

Some files were not shown because too many files have changed in this diff.