Add framework for Bulk Import (#2538)

* Start work on bulk import

* Work on bulk import

* Have batch working

* Working

* Working

* More work

* More work on bulk export

* Address fixmes

* License header updates

* Test fixes

* License header updates

* Test fix

* Test fix

* Version bumps

* Work on config

* Test cleanup

* One more version bump

* Version bump

* Cleanup

* A few additions

* Test fixes

* Test fix

* Test fix

* Migration fix

* Test fix

* Test fix
James Agnew 2021-04-14 17:41:32 -04:00 committed by GitHub
parent 2ebc57b8a2
commit ca2088f3ad
151 changed files with 2891 additions and 1279 deletions

View File

@@ -31,6 +31,7 @@ charset = utf-8
 indent_style = tab
 tab_width = 3
 indent_size = 3
+continuation_indent_size=3
 ij_java_align_consecutive_assignments = false
 ij_java_align_consecutive_variable_declarations = false
 ij_java_align_group_field_declarations = false

View File

@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -156,7 +156,8 @@ public class BundleBuilder {
 		// Bundle.entry.request.url
 		IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-		url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue());
+		String resourceType = myContext.getResourceType(theResource);
+		url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
 		myEntryRequestUrlChild.getMutator().setValue(request, url);

 		// Bundle.entry.request.url
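
A hedged illustration of the intended effect of the BundleBuilder change above (the Patient resource and id are invented for the example; this code is not part of the diff):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Patient;

public class BundleBuilderUrlExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		BundleBuilder builder = new BundleBuilder(ctx);

		Patient patient = new Patient();
		patient.setId("Patient/123");
		builder.addTransactionUpdateEntry(patient);

		IBaseBundle bundle = builder.getBundle();
		// Before this fix the update entry's request.url was just "123";
		// with the resource type prepended it should now be "Patient/123".
		System.out.println(ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
	}
}
```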

View File

@@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri
 # JPA Messages
-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.

View File

@@ -3,14 +3,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-cli</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -78,13 +78,13 @@
 <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-structures-dstu2</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <scope>compile</scope>
 </dependency>
 <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-jpaserver-subscription</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <scope>compile</scope>
 </dependency>
 <dependency>
@@ -101,7 +101,7 @@
 <dependency>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir-testpage-overlay</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <classifier>classes</classifier>
 </dependency>
 <dependency>

View File

@@ -0,0 +1,25 @@
---
- item:
type: "add"
title: "The versions of a few dependencies have been bumped to the latest releases
(dependent HAPI modules listed in brackets):
<ul>
<li>Commons-Lang3 (Core): 3.9 -> 3.12.0</li>
<li>Commons-Text (Core): 1.7 -> 1.9</li>
<li>Commons-Codec (Core): 1.14 -> 1.15</li>
<li>Commons-IO (Core): 2.6 -> 2.8.0</li>
<li>Guava (Core): 30.1-jre -> 30.1.1-jre</li>
<li>Jackson (Core): 2.12.1 -> 2.12.3</li>
<li>Woodstox (Core): 6.2.3 -> 6.2.5</li>
<li>Gson (JPA): 2.8.5 -> 2.8.6</li>
<li>Caffeine (JPA): 2.7.0 -> 3.0.1</li>
<li>Hibernate (JPA): 5.4.26.Final -> 5.4.30.Final</li>
<li>Hibernate Search (JPA): 6.0.0.Final -> 6.0.2.Final</li>
<li>Spring (JPA): 5.3.3 -> 5.3.6</li>
<li>Spring Batch (JPA): 4.2.3.RELEASE -> 4.3.2</li>
<li>Spring Data (JPA): 2.4.2 -> 2.4.7</li>
<li>Commons DBCP2 (JPA): 2.7.0 -> 2.8.0</li>
<li>Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE</li>
<li>JAnsi (CLI): 2.1.1 -> 2.3.2</li>
</ul>
"

View File

@@ -11,7 +11,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -4,7 +4,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -6,7 +6,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-fhir</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
 </parent>

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -67,6 +67,18 @@ public interface IFhirSystemDao<T, MT> extends IDao {
 	 */
 	IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);

+	/**
+	 * Executes a FHIR transaction using a new database transaction. This method must
+	 * not be called from within a DB transaction.
+	 */
 	T transaction(RequestDetails theRequestDetails, T theResources);

+	/**
+	 * Executes a FHIR transaction nested inside the current database transaction.
+	 * This form of the transaction processor can handle write operations only (no reads)
+	 */
+	default T transactionNested(RequestDetails theRequestDetails, T theResources) {
+		throw new UnsupportedOperationException();
+	}
 }
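
A hedged sketch of how a caller might choose between the two entry points documented above. The wrapper class is hypothetical and not part of this commit; it assumes SystemRequestDetails offers a no-arg constructor, as used elsewhere in this codebase.

```java
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta;

public class TransactionEntryPoints {

	/** Called from code that is NOT already inside a database transaction. */
	public Bundle runStandalone(IFhirSystemDao<Bundle, Meta> theSystemDao, Bundle theTransaction) {
		return theSystemDao.transaction(new SystemRequestDetails(), theTransaction);
	}

	/**
	 * Called from code that already owns a database transaction (for example a batch
	 * writer); per the new javadoc this path supports write operations only.
	 */
	public Bundle runNested(IFhirSystemDao<Bundle, Meta> theSystemDao, Bundle theTransaction) {
		return theSystemDao.transactionNested(new SystemRequestDetails(), theTransaction);
	}
}
```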

View File

@@ -5,7 +5,7 @@
 <parent>
    <groupId>ca.uhn.hapi.fhir</groupId>
    <artifactId>hapi-deployable-pom</artifactId>
-   <version>5.4.0-PRE5-SNAPSHOT</version>
+   <version>5.4.0-PRE6-SNAPSHOT</version>
    <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

View File

@@ -20,7 +20,8 @@ package ca.uhn.fhir.jpa.batch;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
@@ -28,9 +29,11 @@ import org.springframework.context.annotation.Import;
 //When you define a new batch job, add it here.
 @Import({
 	CommonBatchJobConfig.class,
-	BulkExportJobConfig.class
+	BulkExportJobConfig.class,
+	BulkImportJobConfig.class
 })
 public class BatchJobsConfig {
+	public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
 	public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
 	public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
 	public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
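
A hedged sketch of how the newly registered job could be launched once BulkImportJobConfig is imported above. It assumes IBatchJobSubmitter.runJob(Job, JobParameters), which the existing export service already uses, and the "jobUUID" parameter name simply mirrors the export jobs; both are assumptions, not statements from this diff.

```java
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

public class BulkImportJobLauncherSketch {

	@Autowired
	private IBatchJobSubmitter myJobSubmitter;

	// The bean name matches the BULK_IMPORT_JOB_NAME constant added above.
	@Autowired
	@Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME)
	private Job myBulkImportJob;

	public void launch(String theJobUuid) throws JobParametersInvalidException {
		// Hand the job and its identifying parameter to the shared submitter.
		myJobSubmitter.runJob(myBulkImportJob, new JobParametersBuilder()
			.addString("jobUUID", theJobUuid)
			.toJobParameters());
	}
}
```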

View File

@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.fhirpath.IFhirPath;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
 import ca.uhn.fhir.util.ExtensionUtil;
 import ca.uhn.fhir.util.HapiExtensions;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.api;
+package ca.uhn.fhir.jpa.bulk.export.api;

 /*-
  * #%L
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import org.hl7.fhir.instance.model.api.IIdType;

 import javax.transaction.Transactional;
@@ -50,7 +50,7 @@ public interface IBulkDataExportSvc {
 	class JobInfo {
 		private String myJobId;
-		private BulkJobStatusEnum myStatus;
+		private BulkExportJobStatusEnum myStatus;
 		private List<FileEntry> myFiles;
 		private String myRequest;
 		private Date myStatusTime;
@@ -90,11 +90,11 @@
 		}

-		public BulkJobStatusEnum getStatus() {
+		public BulkExportJobStatusEnum getStatus() {
 			return myStatus;
 		}

-		public JobInfo setStatus(BulkJobStatusEnum theStatus) {
+		public JobInfo setStatus(BulkExportJobStatusEnum theStatus) {
 			myStatus = theStatus;
 			return this;
 		}

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
 import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
-import ca.uhn.fhir.jpa.entity.Search;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
 		myPidIterator = getResourcePidIterator();
 	}

-	abstract Iterator<ResourcePersistentId> getResourcePidIterator();
+	protected abstract Iterator<ResourcePersistentId> getResourcePidIterator();

 	protected List<SearchParameterMap> createSearchParameterMapsForResourceType() {
 		BulkExportJobEntity jobEntity = getJobEntity();

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
-import org.springframework.batch.core.BatchStatus;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.StepExecutionListener;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-
-import static org.apache.commons.lang3.StringUtils.isNotBlank;

 /**
  * Will run before and after a job to set the status to whatever is appropriate.
@@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener
 	public void beforeStep(StepExecution theStepExecution) {
 		String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
 		if (jobUuid != null) {
-			myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING);
+			myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING);
 		}
 	}

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import org.springframework.batch.core.ExitStatus;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.StepExecutionListener;
@@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
 			}
 			assert isNotBlank(jobUuid);
 			String exitDescription = theStepExecution.getExitStatus().getExitDescription();
-			myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription);
+			myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription);
 		}
 		return theStepExecution.getExitStatus();
 	}

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import org.springframework.batch.core.BatchStatus;
 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.scope.context.ChunkContext;
@@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet {
 	@Override
 	public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
 		if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) {
-			myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE);
+			myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE);
 		} else {
-			myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR);
+			myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR);
 		}
 		return RepeatStatus.FINISHED;
 	}

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job;
 import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
 import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope;
 import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
 import org.springframework.batch.core.configuration.annotation.StepScope;
 import org.springframework.batch.item.ItemProcessor;
-import org.springframework.batch.item.ItemReader;
-import org.springframework.batch.item.ItemWriter;
 import org.springframework.batch.item.support.CompositeItemProcessor;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
@@ -59,6 +57,7 @@ public class BulkExportJobConfig {
 	public static final String GROUP_ID_PARAMETER = "groupId";
 	public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
 	public static final int CHUNK_SIZE = 100;
+	public static final String JOB_DESCRIPTION = "jobDescription";

 	@Autowired
 	private StepBuilderFactory myStepBuilderFactory;
@@ -90,9 +89,9 @@
 	@Lazy
 	public Job bulkExportJob() {
 		return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
-			.validator(bulkJobParameterValidator())
+			.validator(bulkExportJobParameterValidator())
 			.start(createBulkExportEntityStep())
-			.next(partitionStep())
+			.next(bulkExportPartitionStep())
 			.next(closeJobStep())
 			.build();
 	}
@@ -114,7 +113,7 @@
 	public Job groupBulkExportJob() {
 		return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
 			.validator(groupBulkJobParameterValidator())
-			.validator(bulkJobParameterValidator())
+			.validator(bulkExportJobParameterValidator())
 			.start(createBulkExportEntityStep())
 			.next(groupPartitionStep())
 			.next(closeJobStep())
@@ -125,7 +124,7 @@
 	@Lazy
 	public Job patientBulkExportJob() {
 		return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
-			.validator(bulkJobParameterValidator())
+			.validator(bulkExportJobParameterValidator())
 			.start(createBulkExportEntityStep())
 			.next(patientPartitionStep())
 			.next(closeJobStep())
@@ -150,8 +149,9 @@
 		return new CreateBulkExportEntityTasklet();
 	}

 	@Bean
-	public JobParametersValidator bulkJobParameterValidator() {
+	public JobParametersValidator bulkExportJobParameterValidator() {
 		return new BulkExportJobParameterValidator();
 	}
@@ -159,7 +159,7 @@
 	@Bean
 	public Step groupBulkExportGenerateResourceFilesStep() {
 		return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
-			.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+			.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
 			.reader(groupBulkItemReader())
 			.processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
 			.writer(resourceToFileWriter())
@@ -170,17 +170,18 @@
 	@Bean
 	public Step bulkExportGenerateResourceFilesStep() {
 		return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep")
-			.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+			.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
 			.reader(bulkItemReader())
 			.processor(myPidToIBaseResourceProcessor)
 			.writer(resourceToFileWriter())
 			.listener(bulkExportGenerateResourceFilesStepListener())
 			.build();
 	}

 	@Bean
 	public Step patientBulkExportGenerateResourceFilesStep() {
 		return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
-			.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+			.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
 			.reader(patientBulkItemReader())
 			.processor(myPidToIBaseResourceProcessor)
 			.writer(resourceToFileWriter())
@@ -214,7 +215,7 @@
 	}

 	@Bean
-	public Step partitionStep() {
+	public Step bulkExportPartitionStep() {
 		return myStepBuilderFactory.get("partitionStep")
 			.partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
 			.step(bulkExportGenerateResourceFilesStep())
@@ -240,7 +241,7 @@
 	@Bean
 	@StepScope
-	public GroupBulkItemReader groupBulkItemReader(){
+	public GroupBulkItemReader groupBulkItemReader() {
 		return new GroupBulkItemReader();
 	}
@@ -252,7 +253,7 @@
 	@Bean
 	@StepScope
-	public BulkItemReader bulkItemReader(){
+	public BulkItemReader bulkItemReader() {
 		return new BulkItemReader();
 	}
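
A minimal sketch of the partition-step pattern the configuration above relies on: a Partitioner fans one logical step out into one execution context per resource type, and the partition step then runs the worker step once per context. The class, resource types, and key names here are illustrative assumptions, not code from the commit.

```java
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

import java.util.HashMap;
import java.util.Map;

public class PartitionStepSketch {

	private StepBuilderFactory myStepBuilderFactory; // injected by Spring in real code

	// One partition per resource type; each partition carries its own context values.
	Partitioner resourceTypePartitioner() {
		return gridSize -> {
			Map<String, ExecutionContext> partitions = new HashMap<>();
			for (String resourceType : new String[]{"Patient", "Observation"}) { // illustrative types
				ExecutionContext context = new ExecutionContext();
				context.putString("resourceType", resourceType);
				partitions.put("partition-" + resourceType, context);
			}
			return partitions;
		};
	}

	// The partition step wraps the chunk-oriented worker step shown in the diff above.
	Step partitionStep(Step workerStep) {
		return myStepBuilderFactory.get("bulkExportPartitionStep")
			.partitioner(workerStep.getName(), resourceTypePartitioner())
			.step(workerStep)
			.build();
	}
}
```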

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
 import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
 import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
 import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.JobParametersValidator;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
 import ca.uhn.fhir.rest.api.Constants;
 import org.springframework.batch.core.JobParametersBuilder;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import org.slf4j.Logger;

-import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader {
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();

 	@Override
-	Iterator<ResourcePersistentId> getResourcePidIterator() {
+	protected Iterator<ResourcePersistentId> getResourcePidIterator() {
 		ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID);
 		Set<ResourcePersistentId> myReadPids = new HashSet<>();

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
 import org.apache.commons.lang3.StringUtils;
@@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
 		}
 	}

-	private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
+	public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
 		theChunkContext
 			.getStepContext()
 			.getStepExecution()

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -36,7 +36,6 @@ import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import ca.uhn.fhir.rest.param.ReferenceOrListParam;
 import ca.uhn.fhir.rest.param.ReferenceParam;
-import com.google.common.collect.Multimaps;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
@@ -81,7 +80,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
 	private MdmExpansionCacheSvc myMdmExpansionCacheSvc;

 	@Override
-	Iterator<ResourcePersistentId> getResourcePidIterator() {
+	protected Iterator<ResourcePersistentId> getResourcePidIterator() {
 		Set<ResourcePersistentId> myReadPids = new HashSet<>();

 		//Short circuit out if we detect we are attempting to extract patients

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException;
 import org.springframework.batch.core.JobParametersValidator;

-import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*;
+import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*;
 import static org.slf4j.LoggerFactory.getLogger;

 public class GroupIdPresentValidator implements JobParametersValidator {

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
 	}

 	@Override
-	Iterator<ResourcePersistentId> getResourcePidIterator() {
+	protected Iterator<ResourcePersistentId> getResourcePidIterator() {
 		if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
 			String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
 			ourLog.error(errorMessage);

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
 import ca.uhn.fhir.parser.IParser;
 import ca.uhn.fhir.rest.api.Constants;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

 /*-
  * #%L
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
 import org.slf4j.Logger;
 import org.springframework.batch.core.partition.support.Partitioner;
 import org.springframework.batch.item.ExecutionContext;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.model;
+package ca.uhn.fhir.jpa.bulk.export.model;

 /*-
  * #%L
@@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model;
  * #L%
  */

-public enum BulkJobStatusEnum {
+import com.fasterxml.jackson.annotation.JsonFormat;
+
+@JsonFormat(shape = JsonFormat.Shape.STRING)
+public enum BulkExportJobStatusEnum {
+
+	/**
+	 * Sorting OK!
+	 */

 	SUBMITTED,
 	BUILDING,
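
The @JsonFormat(shape = STRING) annotation added above pins Jackson to serializing the enum by name, regardless of mapper configuration such as WRITE_ENUMS_USING_INDEX. A small self-contained demonstration of that effect (the enum here is a stand-in, not the real class):

```java
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

public class EnumSerializationDemo {

	// Hypothetical stand-in for BulkExportJobStatusEnum, only to show the annotation's effect.
	@JsonFormat(shape = JsonFormat.Shape.STRING)
	enum JobStatus { SUBMITTED, BUILDING, COMPLETE, ERROR }

	static class StatusHolder {
		public JobStatus status = JobStatus.SUBMITTED;
	}

	public static void main(String[] args) throws Exception {
		// Prints {"status":"SUBMITTED"}: the enum is written by name, so persisted
		// status JSON stays stable even if the enum's declaration order changes.
		System.out.println(new ObjectMapper().writeValueAsString(new StatusHolder()));
	}
}
```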

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.provider;
+package ca.uhn.fhir.jpa.bulk.export.provider;

 /*-
  * #%L
@@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider;
  */

 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
 import ca.uhn.fhir.rest.annotation.IdParam;
 import ca.uhn.fhir.rest.annotation.Operation;

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.svc;
+package ca.uhn.fhir.jpa.bulk.export.svc;

 /*-
  * #%L
@@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.context.RuntimeSearchParam;
-import ca.uhn.fhir.fhirpath.IFhirPath;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
@@ -43,16 +42,12 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
 import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
 import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.UrlUtil;
-import com.google.common.collect.Sets;
 import org.apache.commons.lang3.time.DateUtils;
-import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
-import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.InstantType;
 import org.quartz.JobExecutionContext;
@@ -78,9 +73,9 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;

-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM;
 import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
 import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -136,7 +131,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 		Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
 			Pageable page = PageRequest.of(0, 1);
-			Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED);
+			Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED);
 			if (submittedJobs.isEmpty()) {
 				return Optional.empty();
 			}
@@ -158,7 +153,7 @@
 			Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
 			if (submittedJobs.isPresent()) {
 				BulkExportJobEntity jobEntity = submittedJobs.get();
-				jobEntity.setStatus(BulkJobStatusEnum.ERROR);
+				jobEntity.setStatus(BulkExportJobStatusEnum.ERROR);
 				jobEntity.setStatusMessage(e.getMessage());
 				myBulkExportJobDao.save(jobEntity);
 			}
@@ -344,7 +339,7 @@
 		if (useCache) {
 			Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
 			Pageable page = PageRequest.of(0, 10);
-			Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
+			Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR);
 			if (!existing.isEmpty()) {
 				return toSubmittedJobInfo(existing.iterator().next());
 			}
@@ -373,7 +368,7 @@
 		BulkExportJobEntity job = new BulkExportJobEntity();
 		job.setJobId(UUID.randomUUID().toString());
-		job.setStatus(BulkJobStatusEnum.SUBMITTED);
+		job.setStatus(BulkExportJobStatusEnum.SUBMITTED);
 		job.setSince(since);
 		job.setCreated(new Date());
 		job.setRequest(request);
@@ -445,7 +440,7 @@
 		retVal.setStatusMessage(job.getStatusMessage());
 		retVal.setRequest(job.getRequest());
-		if (job.getStatus() == BulkJobStatusEnum.COMPLETE) {
+		if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) {
 			for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
 				for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
 					retVal.addFile()

View File

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.svc;
+package ca.uhn.fhir.jpa.bulk.export.svc;

 /*-
  * #%L
@@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc;
  * #L%
  */

-import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
@@ -84,12 +82,12 @@ public class BulkExportDaoSvc {
 	}

 	@Transactional
-	public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) {
+	public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) {
 		setJobToStatus(theJobUUID, theStatus, null);
 	}

 	@Transactional
-	public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) {
+	public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) {
 		Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(theJobUUID);
 		if (!oJob.isPresent()) {
 			ourLog.error("Job with UUID {} doesn't exist!", theJobUUID);

View File

@@ -0,0 +1,93 @@
package ca.uhn.fhir.jpa.bulk.imprt.api;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import javax.annotation.Nonnull;
import java.util.List;
public interface IBulkDataImportSvc {
/**
* Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to)
*/
String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles);
/**
* Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state
*
* @param theJobId The job ID
* @param theFiles The files to add to the job
*/
void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles);
/**
* Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING}
* state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
* state, meaning that it is a candidate to be picked up for processing
*
* @param theJobId The job ID
*/
void markJobAsReadyForActivation(String theJobId);
/**
* This method is intended to be called from the job scheduler, and will begin execution on
* the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
*
* @return Returns {@literal true} if a job was activated
*/
boolean activateNextReadyJob();
/**
* Updates the job status for the given job
*/
void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus);
/**
* Updates the job status for the given job
*/
void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage);
/**
* Fetches the details of a job by ID, including the number of files it contains
*
* @param theJobId The job ID
* @return The job details
*/
BulkImportJobJson fetchJob(String theJobId);
/**
* Fetch a given file by job ID
*
* @param theJobId The job ID
* @param theFileIndex The index of the file within the job
* @return The file
*/
BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex);
/**
* Delete all input files associated with a particular job
*/
void deleteJobFiles(String theJobId);
}
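
A hedged sketch of the staging lifecycle described by the javadoc above: create a job in STAGING, attach one or more files, then mark it READY so the scheduler can activate it. The setters used on BulkImportJobFileJson are inferred from the reader code later in this commit (getTenantName/getContents), and the tenant value and class are illustrative.

```java
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;

import java.util.Collections;
import java.util.List;

public class BulkImportSubmissionSketch {

	public String submit(IBulkDataImportSvc theImportSvc, String theNdjson) {
		// One file of newline-delimited JSON resources, targeted at an assumed tenant.
		BulkImportJobFileJson file = new BulkImportJobFileJson();
		file.setTenantName("DEFAULT");
		file.setContents(theNdjson);

		// In real use the job description would also carry details such as the row
		// processing mode and batch size; they are omitted from this sketch.
		BulkImportJobJson job = new BulkImportJobJson();
		List<BulkImportJobFileJson> initialFiles = Collections.singletonList(file);

		String jobId = theImportSvc.createNewJob(job, initialFiles); // job starts in STAGING
		theImportSvc.markJobAsReadyForActivation(jobId);             // READY: eligible for activation
		return jobId;
	}
}
```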

View File

@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Will run before and after a job to set the status to whatever is appropriate.
*/
public class ActivateBulkImportEntityStepListener implements StepExecutionListener {
@Autowired
private IBulkDataImportSvc myBulkImportDaoSvc;
@Override
public void beforeStep(StepExecution theStepExecution) {
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
if (jobUuid != null) {
myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
}
}
@Override
public ExitStatus afterStep(StepExecution theStepExecution) {
return ExitStatus.EXECUTING;
}
}

View File

@@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.util.IoUtil;
import com.google.common.io.LineReader;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.io.StringReader;
@SuppressWarnings("UnstableApiUsage")
public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord> {
@Autowired
private IBulkDataImportSvc myBulkDataImportSvc;
@Autowired
private FhirContext myFhirContext;
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;
private StringReader myReader;
private LineReader myLineReader;
private int myLineIndex;
private String myTenantName;
@Override
public ParsedBulkImportRecord read() throws Exception {
if (myReader == null) {
BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex);
myTenantName = file.getTenantName();
myReader = new StringReader(file.getContents());
myLineReader = new LineReader(myReader);
}
String nextLine = myLineReader.readLine();
if (nextLine == null) {
IoUtil.closeQuietly(myReader);
return null;
}
Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid);
IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine);
return new ParsedBulkImportRecord(myTenantName, parsed);
}
}

View File

@ -0,0 +1,74 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import java.util.List;
public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}")
private JobFileRowProcessingModeEnum myRowProcessingMode;
@Autowired
private DaoRegistry myDaoRegistry;
@SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"})
@Override
public void write(List<? extends ParsedBulkImportRecord> theItemLists) throws Exception {
ourLog.info("Beginning bulk import write {} chunks Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex);
for (ParsedBulkImportRecord nextItem : theItemLists) {
SystemRequestDetails requestDetails = new SystemRequestDetails();
requestDetails.setTenantId(nextItem.getTenantName());
// Yeah this is a lame switch - We'll add more later I swear
switch (myRowProcessingMode) {
default:
case FHIR_TRANSACTION:
IFhirSystemDao systemDao = myDaoRegistry.getSystemDao();
IBaseResource inputBundle = nextItem.getRowContent();
systemDao.transactionNested(requestDetails, inputBundle);
break;
}
}
}
}
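
Because FHIR_TRANSACTION mode hands each parsed row to IFhirSystemDao#transactionNested, each line of a job file is expected to be a complete FHIR transaction Bundle. A hedged illustration (hypothetical identifiers, not part of this change) of how one such NDJSON row could be assembled:

// Illustrative only: a single NDJSON row containing a minimal transaction Bundle with one Patient create
String row = "{\"resourceType\":\"Bundle\",\"type\":\"transaction\",\"entry\":[{"
	+ "\"resource\":{\"resourceType\":\"Patient\",\"identifier\":[{\"system\":\"http://example.com/mrn\",\"value\":\"12345\"}]},"
	+ "\"request\":{\"method\":\"POST\",\"url\":\"Patient\"}}]}";
// BulkImportJobFileJson.contents holds one such row per line
String contents = row + "\n" + row;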

View File

@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
/**
* Runs as the final step of a bulk import job: marks the job COMPLETE (or ERROR if the execution did not finish cleanly) and deletes the staged input files in either case.
*/
public class BulkImportJobCloser implements Tasklet {
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired
private IBulkDataImportSvc myBulkDataImportSvc;
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus();
if (executionStatus == BatchStatus.STARTED) {
myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE);
myBulkDataImportSvc.deleteJobFiles(myJobUUID);
} else {
myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus);
myBulkDataImportSvc.deleteJobFiles(myJobUUID);
}
return RepeatStatus.FINISHED;
}
}

View File

@ -0,0 +1,169 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.repeat.CompletionPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;
import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;
/**
* Spring Batch job configuration. Contains all the necessary plumbing to run a
* Bulk Import job.
*/
@Configuration
public class BulkImportJobConfig {
public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval";
@Autowired
private StepBuilderFactory myStepBuilderFactory;
@Autowired
private JobBuilderFactory myJobBuilderFactory;
@Autowired
@Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
private TaskExecutor myTaskExecutor;
@Bean(name = BULK_IMPORT_JOB_NAME)
@Lazy
public Job bulkImportJob() throws Exception {
return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME)
.validator(bulkImportJobParameterValidator())
.start(bulkImportPartitionStep())
.next(bulkImportCloseJobStep())
.build();
}
@Bean
public JobParametersValidator bulkImportJobParameterValidator() {
return new BulkImportJobParameterValidator();
}
@Bean
public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() {
return new CreateBulkImportEntityTasklet();
}
@Bean
@JobScope
public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() {
return new ActivateBulkImportEntityStepListener();
}
@Bean
public Step bulkImportPartitionStep() throws Exception {
return myStepBuilderFactory.get("bulkImportPartitionStep")
.partitioner("bulkImportPartitionStep", bulkImportPartitioner())
.partitionHandler(partitionHandler())
.listener(activateBulkImportEntityStepListener())
.gridSize(10)
.build();
}
private PartitionHandler partitionHandler() throws Exception {
assert myTaskExecutor != null;
TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler();
retVal.setStep(bulkImportProcessFilesStep());
retVal.setTaskExecutor(myTaskExecutor);
retVal.afterPropertiesSet();
return retVal;
}
@Bean
public Step bulkImportCloseJobStep() {
return myStepBuilderFactory.get("bulkImportCloseJobStep")
.tasklet(bulkImportJobCloser())
.build();
}
@Bean
@JobScope
public BulkImportJobCloser bulkImportJobCloser() {
return new BulkImportJobCloser();
}
@Bean
@JobScope
public BulkImportPartitioner bulkImportPartitioner() {
return new BulkImportPartitioner();
}
@Bean
public Step bulkImportProcessFilesStep() {
CompletionPolicy completionPolicy = completionPolicy();
return myStepBuilderFactory.get("bulkImportProcessFilesStep")
.<ParsedBulkImportRecord, ParsedBulkImportRecord>chunk(completionPolicy)
.reader(bulkImportFileReader())
.writer(bulkImportFileWriter())
.listener(bulkImportStepListener())
.listener(completionPolicy)
.build();
}
@Bean
@StepScope
public CompletionPolicy completionPolicy() {
return new BulkImportProcessStepCompletionPolicy();
}
@Bean
@StepScope
public ItemWriter<ParsedBulkImportRecord> bulkImportFileWriter() {
return new BulkImportFileWriter();
}
@Bean
@StepScope
public BulkImportFileReader bulkImportFileReader() {
return new BulkImportFileReader();
}
@Bean
@StepScope
public BulkImportStepListener bulkImportStepListener() {
return new BulkImportStepListener();
}
}

View File

@ -0,0 +1,70 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.Optional;
/**
* This class will prevent a job from running if the UUID does not exist or is invalid.
*/
public class BulkImportJobParameterValidator implements JobParametersValidator {
@Autowired
private IBulkImportJobDao myBulkImportJobDao;
@Autowired
private PlatformTransactionManager myTransactionManager;
@Override
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
if (theJobParameters == null) {
throw new JobParametersInvalidException("This job needs Parameters: [jobUUID]");
}
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder();
String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: ");
errorBuilder.append(jobUUID);
errorBuilder.append(". ");
}
return errorBuilder.toString();
});
if (!StringUtils.isEmpty(errorMessage)) {
throw new JobParametersInvalidException(errorMessage);
}
}
}

View File

@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.Map;
import static org.slf4j.LoggerFactory.getLogger;
public class BulkImportPartitioner implements Partitioner {
public static final String FILE_INDEX = "fileIndex";
public static final String ROW_PROCESSING_MODE = "rowProcessingMode";
private static final Logger ourLog = getLogger(BulkImportPartitioner.class);
@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired
private IBulkDataImportSvc myBulkDataImportSvc;
@Nonnull
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Map<String, ExecutionContext> retVal = new HashMap<>();
BulkImportJobJson job = myBulkDataImportSvc.fetchJob(myJobUUID);
for (int i = 0; i < job.getFileCount(); i++) {
ExecutionContext context = new ExecutionContext();
context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
context.putInt(FILE_INDEX, i);
context.put(ROW_PROCESSING_MODE, job.getProcessingMode());
String key = "FILE" + i;
retVal.put(key, context);
}
return retVal;
}
}

View File

@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.springframework.batch.repeat.RepeatContext;
import org.springframework.batch.repeat.policy.CompletionPolicySupport;
import org.springframework.beans.factory.annotation.Value;
import static ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL;
public class BulkImportProcessStepCompletionPolicy extends CompletionPolicySupport {
@Value("#{jobParameters['" + JOB_PARAM_COMMIT_INTERVAL + "']}")
private int myChunkSize;
@Override
public boolean isComplete(RepeatContext context) {
if (context.getStartedCount() < myChunkSize) {
return false;
}
return true;
}
}

View File

@ -0,0 +1,63 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* This class sets the job status to ERROR if any failures occur while actually
* processing the bulk import files.
*/
public class BulkImportStepListener implements StepExecutionListener {
@Autowired
private IBulkDataImportSvc myBulkDataImportSvc;
@Override
public void beforeStep(@Nonnull StepExecution stepExecution) {
// nothing
}
@Override
public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
if (jobUuid == null) {
jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
}
assert isNotBlank(jobUuid);
String exitDescription = theStepExecution.getExitStatus().getExitDescription();
myBulkDataImportSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.ERROR, exitDescription);
}
return theStepExecution.getExitStatus();
}
}

View File

@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import java.util.Map;
public class CreateBulkImportEntityTasklet implements Tasklet {
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
// Bulk import jobs are always pre-created, so a job UUID parameter is required before we can proceed.
ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED;
}
}

View File

@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
public class BulkImportJobFileJson implements IModelJson {
@JsonProperty("tenantName")
private String myTenantName;
@JsonProperty("contents")
private String myContents;
public String getTenantName() {
return myTenantName;
}
public BulkImportJobFileJson setTenantName(String theTenantName) {
myTenantName = theTenantName;
return this;
}
public String getContents() {
return myContents;
}
public BulkImportJobFileJson setContents(String theContents) {
myContents = theContents;
return this;
}
}

View File

@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;
public class BulkImportJobJson implements IModelJson {
@JsonProperty("processingMode")
private JobFileRowProcessingModeEnum myProcessingMode;
@JsonProperty("jobDescription")
private String myJobDescription;
@JsonProperty("fileCount")
private int myFileCount;
@JsonProperty("batchSize")
private int myBatchSize;
public String getJobDescription() {
return myJobDescription;
}
public BulkImportJobJson setJobDescription(String theJobDescription) {
myJobDescription = theJobDescription;
return this;
}
public JobFileRowProcessingModeEnum getProcessingMode() {
return myProcessingMode;
}
public BulkImportJobJson setProcessingMode(JobFileRowProcessingModeEnum theProcessingMode) {
myProcessingMode = theProcessingMode;
return this;
}
public int getFileCount() {
return myFileCount;
}
public BulkImportJobJson setFileCount(int theFileCount) {
myFileCount = theFileCount;
return this;
}
public int getBatchSize() {
return myBatchSize;
}
public BulkImportJobJson setBatchSize(int theBatchSize) {
myBatchSize = theBatchSize;
return this;
}
}

View File

@ -0,0 +1,34 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.annotation.JsonFormat;
@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum BulkImportJobStatusEnum {
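// Lifecycle: jobs are created in STAGING (createNewJob), moved to READY by markJobAsReadyForActivation(),
// set to RUNNING when the batch job starts, and finish in COMPLETE or ERROR.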
STAGING,
READY,
RUNNING,
COMPLETE,
ERROR
}

View File

@ -0,0 +1,34 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.annotation.JsonFormat;
@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum JobFileRowProcessingModeEnum {
/**
* Sorting OK
*/
FHIR_TRANSACTION
}

View File

@ -0,0 +1,46 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.io.Serializable;
public class ParsedBulkImportRecord implements Serializable {
private static final long serialVersionUID = 1L;
private final String myTenantName;
private final IBaseResource myRowContent;
public ParsedBulkImportRecord(String theTenantName, IBaseResource theRowContent) {
myTenantName = theTenantName;
myRowContent = theRowContent;
}
public String getTenantName() {
return myTenantName;
}
public IBaseResource getRowContent() {
return myRowContent;
}
}

View File

@ -0,0 +1,280 @@
package ca.uhn.fhir.jpa.bulk.imprt.svc;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportSvcImpl.class);
@Autowired
private IBulkImportJobDao myJobDao;
@Autowired
private IBulkImportJobFileDao myJobFileDao;
@Autowired
private PlatformTransactionManager myTxManager;
private TransactionTemplate myTxTemplate;
@Autowired
private ISchedulerService mySchedulerService;
@Autowired
private IBatchJobSubmitter myJobSubmitter;
@Autowired
@Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkImportJob;
@PostConstruct
public void start() {
myTxTemplate = new TransactionTemplate(myTxManager);
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
jobDetail.setId(ActivationJob.class.getName());
jobDetail.setJobClass(ActivationJob.class);
mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail);
}
@Override
@Transactional
public String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription, "Job must not be null");
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null");
ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0");
String jobId = UUID.randomUUID().toString();
ourLog.info("Creating new Bulk Import job with {} files, assigning job ID: {}", theInitialFiles.size(), jobId);
BulkImportJobEntity job = new BulkImportJobEntity();
job.setJobId(jobId);
job.setFileCount(theInitialFiles.size());
job.setStatus(BulkImportJobStatusEnum.STAGING);
job.setJobDescription(theJobDescription.getJobDescription());
job.setBatchSize(theJobDescription.getBatchSize());
job.setRowProcessingMode(theJobDescription.getProcessingMode());
job = myJobDao.save(job);
int nextSequence = 0;
addFilesToJob(theInitialFiles, job, nextSequence);
return jobId;
}
@Override
@Transactional
public void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles) {
ourLog.info("Adding {} files to bulk import job: {}", theFiles.size(), theJobId);
BulkImportJobEntity job = findJobByJobId(theJobId);
ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Job %s has status %s and can not be added to", theJobId, job.getStatus());
addFilesToJob(theFiles, job, job.getFileCount());
job.setFileCount(job.getFileCount() + theFiles.size());
myJobDao.save(job);
}
private BulkImportJobEntity findJobByJobId(String theJobId) {
BulkImportJobEntity job = myJobDao
.findByJobId(theJobId)
.orElseThrow(() -> new InvalidRequestException("Unknown job ID: " + theJobId));
return job;
}
@Override
@Transactional
public void markJobAsReadyForActivation(String theJobId) {
ourLog.info("Activating bulk import job {}", theJobId);
BulkImportJobEntity job = findJobByJobId(theJobId);
ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import job %s can not be activated in status: %s", theJobId, job.getStatus());
job.setStatus(BulkImportJobStatusEnum.READY);
myJobDao.save(job);
}
/**
* To be called by the job scheduler
*/
@Transactional(value = Transactional.TxType.NEVER)
@Override
public boolean activateNextReadyJob() {
Optional<BulkImportJobEntity> jobToProcessOpt = Objects.requireNonNull(myTxTemplate.execute(t -> {
Pageable page = PageRequest.of(0, 1);
Slice<BulkImportJobEntity> submittedJobs = myJobDao.findByStatus(page, BulkImportJobStatusEnum.READY);
if (submittedJobs.isEmpty()) {
return Optional.empty();
}
return Optional.of(submittedJobs.getContent().get(0));
}));
if (!jobToProcessOpt.isPresent()) {
return false;
}
BulkImportJobEntity bulkImportJobEntity = jobToProcessOpt.get();
String jobUuid = bulkImportJobEntity.getJobId();
try {
processJob(bulkImportJobEntity);
} catch (Exception e) {
ourLog.error("Failure while preparing bulk export extract", e);
myTxTemplate.execute(t -> {
Optional<BulkImportJobEntity> submittedJobs = myJobDao.findByJobId(jobUuid);
if (submittedJobs.isPresent()) {
BulkImportJobEntity jobEntity = submittedJobs.get();
jobEntity.setStatus(BulkImportJobStatusEnum.ERROR);
jobEntity.setStatusMessage(e.getMessage());
myJobDao.save(jobEntity);
}
return false;
});
}
return true;
}
@Override
@Transactional
public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus) {
setJobToStatus(theJobId, theStatus, null);
}
@Override
public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) {
BulkImportJobEntity job = findJobByJobId(theJobId);
job.setStatus(theStatus);
job.setStatusMessage(theStatusMessage);
myJobDao.save(job);
}
@Override
@Transactional
public BulkImportJobJson fetchJob(String theJobId) {
BulkImportJobEntity job = findJobByJobId(theJobId);
return job.toJson();
}
@Transactional
@Override
public BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex) {
BulkImportJobEntity job = findJobByJobId(theJobId);
return myJobFileDao
.findForJob(job, theFileIndex)
.map(t -> t.toJson())
.orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for job " + theJobId));
}
@Override
@Transactional
public void deleteJobFiles(String theJobId) {
BulkImportJobEntity job = findJobByJobId(theJobId);
List<Long> files = myJobFileDao.findAllIdsForJob(theJobId);
for (Long next : files) {
myJobFileDao.deleteById(next);
}
myJobDao.delete(job);
}
private void processJob(BulkImportJobEntity theBulkImportJobEntity) throws JobParametersInvalidException {
String jobId = theBulkImportJobEntity.getJobId();
int batchSize = theBulkImportJobEntity.getBatchSize();
ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive");
JobParametersBuilder parameters = new JobParametersBuilder()
.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId)
.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
if (isNotBlank(theBulkImportJobEntity.getJobDescription())) {
parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkImportJobEntity.getJobDescription());
}
ourLog.info("Submitting bulk import job {} to job scheduler", jobId);
myJobSubmitter.runJob(myBulkImportJob, parameters.toJobParameters());
}
private void addFilesToJob(@Nonnull List<BulkImportJobFileJson> theInitialFiles, BulkImportJobEntity job, int nextSequence) {
for (BulkImportJobFileJson nextFile : theInitialFiles) {
ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextFile.getContents(), "Job File Contents must not be null");
BulkImportJobFileEntity jobFile = new BulkImportJobFileEntity();
jobFile.setJob(job);
jobFile.setContents(nextFile.getContents());
jobFile.setTenantName(nextFile.getTenantName());
jobFile.setFileSequence(nextSequence++);
myJobFileDao.save(jobFile);
}
}
public static class ActivationJob implements HapiJob {
@Autowired
private IBulkDataImportSvc myTarget;
@Override
public void execute(JobExecutionContext theContext) {
myTarget.activateNextReadyJob();
}
}
}

View File

@ -11,15 +11,18 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer; import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider; import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor; import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl; import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl; import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider; import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
@ -29,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService; import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory; import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService; import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation; import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
@ -63,7 +67,6 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor; import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor;
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor; import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder; import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager; import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
@ -95,8 +98,8 @@ import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
@ -129,6 +132,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader; import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import org.hibernate.jpa.HibernatePersistenceProvider; import org.hibernate.jpa.HibernatePersistenceProvider;
@ -160,6 +164,7 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import java.util.Date; import java.util.Date;
import java.util.concurrent.RejectedExecutionHandler;
/* /*
* #%L * #%L
@ -199,24 +204,23 @@ public abstract class BaseConfig {
public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider"; public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider";
public static final String SEARCH_BUILDER = "SearchBuilder"; public static final String SEARCH_BUILDER = "SearchBuilder";
public static final String HISTORY_BUILDER = "HistoryBuilder"; public static final String HISTORY_BUILDER = "HistoryBuilder";
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder"; public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder";
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
@Autowired @Autowired
protected Environment myEnv; protected Environment myEnv;
@Autowired @Autowired
private DaoRegistry myDaoRegistry; private DaoRegistry myDaoRegistry;
private Integer searchCoordCorePoolSize = 20;
private Integer searchCoordMaxPoolSize = 100;
private Integer searchCoordQueueCapacity = 200;
/** /**
* Subclasses may override this method to provide settings such as search coordinator pool sizes. * Subclasses may override this method to provide settings such as search coordinator pool sizes.
*/ */
@PostConstruct @PostConstruct
public void initSettings() {} public void initSettings() {
}
private Integer searchCoordCorePoolSize = 20;
private Integer searchCoordMaxPoolSize = 100;
private Integer searchCoordQueueCapacity = 200;
public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) { public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) {
this.searchCoordCorePoolSize = searchCoordCorePoolSize; this.searchCoordCorePoolSize = searchCoordCorePoolSize;
@ -297,6 +301,11 @@ public abstract class BaseConfig {
return new SubscriptionTriggeringProvider(); return new SubscriptionTriggeringProvider();
} }
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();
}
@Bean(name = "myAttachmentBinaryAccessProvider") @Bean(name = "myAttachmentBinaryAccessProvider")
@Lazy @Lazy
public BinaryAccessProvider binaryAccessProvider() { public BinaryAccessProvider binaryAccessProvider() {
@ -381,13 +390,15 @@ public abstract class BaseConfig {
return retVal; return retVal;
} }
@Bean @Bean(name= BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
public TaskExecutor jobLaunchingTaskExecutor() { public TaskExecutor jobLaunchingTaskExecutor() {
ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor(); ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor();
asyncTaskExecutor.setCorePoolSize(5); asyncTaskExecutor.setCorePoolSize(0);
asyncTaskExecutor.setMaxPoolSize(10); asyncTaskExecutor.setMaxPoolSize(10);
asyncTaskExecutor.setQueueCapacity(500); asyncTaskExecutor.setQueueCapacity(0);
asyncTaskExecutor.setAllowCoreThreadTimeOut(true);
asyncTaskExecutor.setThreadNamePrefix("JobLauncher-"); asyncTaskExecutor.setThreadNamePrefix("JobLauncher-");
asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy());
asyncTaskExecutor.initialize(); asyncTaskExecutor.initialize();
return asyncTaskExecutor; return asyncTaskExecutor;
} }
@ -514,6 +525,11 @@ public abstract class BaseConfig {
return new BulkDataExportProvider(); return new BulkDataExportProvider();
} }
@Bean
@Lazy
public IBulkDataImportSvc bulkDataImportSvc() {
return new BulkDataImportSvcImpl();
}
@Bean @Bean
public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() { public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() {

View File

@ -7,6 +7,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl; import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport; import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.term.TermReadSvcDstu2; import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.jpa.util.ResourceCountCache;
@ -93,6 +96,12 @@ public class BaseDstu2Config extends BaseConfig {
return retVal; return retVal;
} }
@Bean
public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
return new TransactionProcessorVersionAdapterDstu2();
}
@Bean(name = "myDefaultProfileValidationSupport") @Bean(name = "myDefaultProfileValidationSupport")
public DefaultProfileValidationSupport defaultProfileValidationSupport() { public DefaultProfileValidationSupport defaultProfileValidationSupport() {
return new DefaultProfileValidationSupport(fhirContext()); return new DefaultProfileValidationSupport(fhirContext());

View File

@ -87,11 +87,6 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus {
return new TransactionProcessorVersionAdapterDstu3(); return new TransactionProcessorVersionAdapterDstu3();
} }
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();
}
@Bean(name = "myResourceCountsCache") @Bean(name = "myResourceCountsCache")
public ResourceCountCache resourceCountsCache() { public ResourceCountCache resourceCountsCache() {
ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts()); ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());

View File

@ -82,11 +82,6 @@ public class BaseR4Config extends BaseConfigDstu3Plus {
return new TransactionProcessorVersionAdapterR4(); return new TransactionProcessorVersionAdapterR4();
} }
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();
}
@Bean(name = GRAPHQL_PROVIDER_NAME) @Bean(name = GRAPHQL_PROVIDER_NAME)
@Lazy @Lazy
public GraphQLProvider graphQLProvider() { public GraphQLProvider graphQLProvider() {

View File

@ -80,11 +80,6 @@ public class BaseR5Config extends BaseConfigDstu3Plus {
return new TransactionProcessorVersionAdapterR5(); return new TransactionProcessorVersionAdapterR5();
} }
@Bean
public TransactionProcessor transactionProcessor() {
return new TransactionProcessor();
}
@Bean(name = GRAPHQL_PROVIDER_NAME) @Bean(name = GRAPHQL_PROVIDER_NAME)
@Lazy @Lazy
public GraphQLProvider graphQLProvider() { public GraphQLProvider graphQLProvider() {

View File

@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome; import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.util.ResourceCountCache; import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
@ -17,6 +18,7 @@ import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import java.util.Date; import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -42,14 +44,26 @@ import java.util.Map;
* #L% * #L%
*/ */
public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> { public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
@Autowired @Autowired
@Qualifier("myResourceCountsCache") @Qualifier("myResourceCountsCache")
public ResourceCountCache myResourceCountsCache; public ResourceCountCache myResourceCountsCache;
@Autowired @Autowired
private PartitionSettings myPartitionSettings; private TransactionProcessor myTransactionProcessor;
@VisibleForTesting
public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
myTransactionProcessor = theTransactionProcessor;
}
@Override
@PostConstruct
public void start() {
super.start();
myTransactionProcessor.setDao(this);
}
@Override @Override
@Transactional(propagation = Propagation.NEVER) @Transactional(propagation = Propagation.NEVER)
@ -91,6 +105,18 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
return retVal; return retVal;
} }
@Override
@Transactional(propagation = Propagation.NEVER)
public T transaction(RequestDetails theRequestDetails, T theRequest) {
return myTransactionProcessor.transaction(theRequestDetails, theRequest, false);
}
@Override
@Transactional(propagation = Propagation.MANDATORY)
public T transactionNested(RequestDetails theRequestDetails, T theRequest) {
return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
}
@Nullable @Nullable
@Override @Override

View File

@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster; import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.IParser;
@ -142,14 +143,14 @@ public abstract class BaseTransactionProcessor {
ourLog.trace("Starting transaction processor"); ourLog.trace("Starting transaction processor");
} }
public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest) { public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) {
if (theRequestDetails != null && theRequestDetails.getServer() != null && myDao != null) { if (theRequestDetails != null && theRequestDetails.getServer() != null && myDao != null) {
IServerInterceptor.ActionRequestDetails requestDetails = new IServerInterceptor.ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null); IServerInterceptor.ActionRequestDetails requestDetails = new IServerInterceptor.ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null);
myDao.notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails); myDao.notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails);
} }
String actionName = "Transaction"; String actionName = "Transaction";
IBaseBundle response = processTransactionAsSubRequest((RequestDetails) theRequestDetails, theRequest, actionName); IBaseBundle response = processTransactionAsSubRequest(theRequestDetails, theRequest, actionName, theNestedMode);
List<IBase> entries = myVersionAdapter.getEntries(response); List<IBase> entries = myVersionAdapter.getEntries(response);
for (int i = 0; i < entries.size(); i++) { for (int i = 0; i < entries.size(); i++) {
@ -190,7 +191,7 @@ public abstract class BaseTransactionProcessor {
myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue()); myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue());
} }
transaction(theRequestDetails, transactionBundle); transaction(theRequestDetails, transactionBundle, false);
return resp; return resp;
} }
@ -270,10 +271,10 @@ public abstract class BaseTransactionProcessor {
myDao = theDao; myDao = theDao;
} }
private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName) { private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) {
BaseHapiFhirDao.markRequestAsProcessingSubRequest(theRequestDetails); BaseHapiFhirDao.markRequestAsProcessingSubRequest(theRequestDetails);
try { try {
return processTransaction(theRequestDetails, theRequest, theActionName); return processTransaction(theRequestDetails, theRequest, theActionName, theNestedMode);
} finally { } finally {
BaseHapiFhirDao.clearRequestAsProcessingSubRequest(theRequestDetails); BaseHapiFhirDao.clearRequestAsProcessingSubRequest(theRequestDetails);
} }
@ -289,7 +290,7 @@ public abstract class BaseTransactionProcessor {
myTxManager = theTxManager; myTxManager = theTxManager;
} }
private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest) { private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) {
ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size()); ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size());
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@ -310,7 +311,7 @@ public abstract class BaseTransactionProcessor {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode()); IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry); myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request"); IBaseBundle nextResponseBundle = processTransactionAsSubRequest(theRequestDetails, subRequestBundle, "Batch sub-request", theNestedMode);
IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0); IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myVersionAdapter.addEntry(resp, subResponseEntry); myVersionAdapter.addEntry(resp, subResponseEntry);
@ -341,7 +342,7 @@ public abstract class BaseTransactionProcessor {
} }
long delay = System.currentTimeMillis() - start; long delay = System.currentTimeMillis() - start;
ourLog.info("Batch completed in {}ms", new Object[]{delay}); ourLog.info("Batch completed in {}ms", delay);
return resp; return resp;
} }
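Batch processing keeps its existing isolation model: each batch entry is copied into a single-entry transaction Bundle and processed as a sub-request, so one failing entry does not roll back the others. A rough, self-contained sketch of that splitting step using plain R4 model classes (the real processor goes through its version adapter and processTransactionAsSubRequest() instead):

import org.hl7.fhir.r4.model.Bundle;

import java.util.ArrayList;
import java.util.List;

public class BatchSplitExample {
	// Copies each batch entry into its own single-entry transaction Bundle.
	public static List<Bundle> splitIntoSubTransactions(Bundle theBatch) {
		List<Bundle> subRequests = new ArrayList<>();
		for (Bundle.BundleEntryComponent entry : theBatch.getEntry()) {
			Bundle subRequest = new Bundle();
			subRequest.setType(Bundle.BundleType.TRANSACTION);
			subRequest.addEntry(entry.copy());
			subRequests.add(subRequest);
		}
		return subRequests;
	}
}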
@ -351,13 +352,13 @@ public abstract class BaseTransactionProcessor {
myHapiTransactionService = theHapiTransactionService; myHapiTransactionService = theHapiTransactionService;
} }
private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName) { private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName, boolean theNestedMode) {
validateDependencies(); validateDependencies();
String transactionType = myVersionAdapter.getBundleType(theRequest); String transactionType = myVersionAdapter.getBundleType(theRequest);
if (org.hl7.fhir.r4.model.Bundle.BundleType.BATCH.toCode().equals(transactionType)) { if (org.hl7.fhir.r4.model.Bundle.BundleType.BATCH.toCode().equals(transactionType)) {
return batch(theRequestDetails, theRequest); return batch(theRequestDetails, theRequest, theNestedMode);
} }
if (transactionType == null) { if (transactionType == null) {
@ -465,6 +466,10 @@ public abstract class BaseTransactionProcessor {
} }
for (IBase nextReqEntry : getEntries) { for (IBase nextReqEntry : getEntries) {
if (theNestedMode) {
throw new InvalidRequestException("Can not invoke read operation on nested transaction");
}
if (!(theRequestDetails instanceof ServletRequestDetails)) { if (!(theRequestDetails instanceof ServletRequestDetails)) {
throw new MethodNotAllowedException("Can not call transaction GET methods from this context"); throw new MethodNotAllowedException("Can not call transaction GET methods from this context");
} }
@ -976,7 +981,12 @@ public abstract class BaseTransactionProcessor {
} }
} }
IPrimitiveType<Date> deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource); IPrimitiveType<Date> deletedInstantOrNull;
if (nextResource instanceof IAnyResource) {
deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource);
} else {
deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) nextResource);
}
Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null; Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null;
IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(nextResource.getClass()); IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(nextResource.getClass());


@ -20,559 +20,19 @@ package ca.uhn.fhir.jpa.dao;
* #L% * #L%
*/ */
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt;
import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryResponse;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.NotModifiedException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.UrlUtil.UrlParts;
import com.google.common.collect.ArrayListMultimap;
import org.apache.http.NameValuePair;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> { public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoDstu2.class);
@Autowired
private PlatformTransactionManager myTxManager;
@Autowired
private MatchUrlService myMatchUrlService;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private MatchResourceUrlService myMatchResourceUrlService;
@Autowired
private HapiTransactionService myHapiTransactionalService;
private Bundle batch(final RequestDetails theRequestDetails, Bundle theRequest) {
ourLog.info("Beginning batch with {} resources", theRequest.getEntry().size());
long start = System.currentTimeMillis();
Bundle resp = new Bundle();
resp.setType(BundleTypeEnum.BATCH_RESPONSE);
/*
* For batch, we handle each entry as a mini-transaction in its own database transaction so that if one fails, it doesn't prevent others
*/
for (final Entry nextRequestEntry : theRequest.getEntry()) {
TransactionCallback<Bundle> callback = new TransactionCallback<Bundle>() {
@Override
public Bundle doInTransaction(TransactionStatus theStatus) {
Bundle subRequestBundle = new Bundle();
subRequestBundle.setType(BundleTypeEnum.TRANSACTION);
subRequestBundle.addEntry(nextRequestEntry);
return transaction((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request");
}
};
BaseServerResponseException caughtEx;
try {
Bundle nextResponseBundle;
if (nextRequestEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) {
// Don't process GETs in a transaction because they'll
// create their own
nextResponseBundle = callback.doInTransaction(null);
} else {
nextResponseBundle = myHapiTransactionalService.execute(theRequestDetails, callback);
}
caughtEx = null;
Entry subResponseEntry = nextResponseBundle.getEntry().get(0);
resp.addEntry(subResponseEntry);
/*
* If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
*/
if (subResponseEntry.getResource() == null) {
subResponseEntry.setResource(nextResponseBundle.getEntry().get(0).getResource());
}
} catch (BaseServerResponseException e) {
caughtEx = e;
} catch (Throwable t) {
ourLog.error("Failure during BATCH sub transaction processing", t);
caughtEx = new InternalErrorException(t);
}
if (caughtEx != null) {
Entry nextEntry = resp.addEntry();
OperationOutcome oo = new OperationOutcome();
oo.addIssue().setSeverity(IssueSeverityEnum.ERROR).setDiagnostics(caughtEx.getMessage());
nextEntry.setResource(oo);
EntryResponse nextEntryResp = nextEntry.getResponse();
nextEntryResp.setStatus(toStatusString(caughtEx.getStatusCode()));
}
}
long delay = System.currentTimeMillis() - start;
ourLog.info("Batch completed in {}ms", new Object[] {delay});
return resp;
}
@SuppressWarnings("unchecked")
private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) {
BundleTypeEnum transactionType = theRequest.getTypeElement().getValueAsEnum();
if (transactionType == BundleTypeEnum.BATCH) {
return batch(theRequestDetails, theRequest);
}
return doTransaction(theRequestDetails, theRequest, theActionName, transactionType);
}
private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, BundleTypeEnum theTransactionType) {
if (theTransactionType == null) {
String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + BundleTypeEnum.TRANSACTION.getCode();
ourLog.warn(message);
theTransactionType = BundleTypeEnum.TRANSACTION;
}
if (theTransactionType != BundleTypeEnum.TRANSACTION) {
throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + theTransactionType.getCode());
}
ourLog.info("Beginning {} with {} resources", theActionName, theRequest.getEntry().size());
long start = System.currentTimeMillis();
TransactionDetails transactionDetails = new TransactionDetails();
Set<IdDt> allIds = new LinkedHashSet<IdDt>();
Map<IdDt, IdDt> idSubstitutions = new HashMap<IdDt, IdDt>();
Map<IdDt, DaoMethodOutcome> idToPersistedOutcome = new HashMap<IdDt, DaoMethodOutcome>();
/*
* We want to execute the transaction request bundle elements in the order
* specified by the FHIR specification (see TransactionSorter) so we save the
* original order in the request, then sort it.
*
* Entries with a type of GET are removed from the bundle so that they
* can be processed at the very end. We do this because the incoming resources
* are saved in a two-phase way in order to deal with interdependencies, and
* we want the GET processing to use the final indexing state
*/
Bundle response = new Bundle();
List<Entry> getEntries = new ArrayList<Entry>();
IdentityHashMap<Entry, Integer> originalRequestOrder = new IdentityHashMap<Entry, Integer>();
for (int i = 0; i < theRequest.getEntry().size(); i++) {
originalRequestOrder.put(theRequest.getEntry().get(i), i);
response.addEntry();
if (theRequest.getEntry().get(i).getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) {
getEntries.add(theRequest.getEntry().get(i));
}
}
Collections.sort(theRequest.getEntry(), new TransactionSorter());
List<IIdType> deletedResources = new ArrayList<>();
DeleteConflictList deleteConflicts = new DeleteConflictList();
Map<Entry, IBasePersistedResource> entriesToProcess = new IdentityHashMap<>();
Set<IBasePersistedResource> nonUpdatedEntities = new HashSet<>();
Set<IBasePersistedResource> updatedEntities = new HashSet<>();
/*
* Handle: GET/PUT/POST
*/
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.execute(t->{
handleTransactionWriteOperations(theRequestDetails, theRequest, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, deletedResources, deleteConflicts, entriesToProcess, nonUpdatedEntities, updatedEntities);
return null;
});
/*
* Loop through the request and process any entries of type GET
*/
for (int i = 0; i < getEntries.size(); i++) {
Entry nextReqEntry = getEntries.get(i);
Integer originalOrder = originalRequestOrder.get(nextReqEntry);
Entry nextRespEntry = response.getEntry().get(originalOrder);
ServletSubRequestDetails requestDetails = new ServletSubRequestDetails(theRequestDetails);
requestDetails.setServletRequest(theRequestDetails.getServletRequest());
requestDetails.setRequestType(RequestTypeEnum.GET);
requestDetails.setServer(theRequestDetails.getServer());
String url = extractTransactionUrlOrThrowException(nextReqEntry, HTTPVerbEnum.GET);
int qIndex = url.indexOf('?');
ArrayListMultimap<String, String> paramValues = ArrayListMultimap.create();
requestDetails.setParameters(new HashMap<String, String[]>());
if (qIndex != -1) {
String params = url.substring(qIndex);
List<NameValuePair> parameters = UrlUtil.translateMatchUrl(params);
for (NameValuePair next : parameters) {
paramValues.put(next.getName(), next.getValue());
}
for (Map.Entry<String, Collection<String>> nextParamEntry : paramValues.asMap().entrySet()) {
String[] nextValue = nextParamEntry.getValue().toArray(new String[nextParamEntry.getValue().size()]);
requestDetails.addParameter(nextParamEntry.getKey(), nextValue);
}
url = url.substring(0, qIndex);
}
requestDetails.setRequestPath(url);
requestDetails.setFhirServerBase(theRequestDetails.getFhirServerBase());
theRequestDetails.getServer().populateRequestDetailsFromRequestPath(requestDetails, url);
BaseMethodBinding<?> method = theRequestDetails.getServer().determineResourceMethod(requestDetails, url);
if (method == null) {
throw new IllegalArgumentException("Unable to handle GET " + url);
}
if (isNotBlank(nextReqEntry.getRequest().getIfMatch())) {
requestDetails.addHeader(Constants.HEADER_IF_MATCH, nextReqEntry.getRequest().getIfMatch());
}
if (isNotBlank(nextReqEntry.getRequest().getIfNoneExist())) {
requestDetails.addHeader(Constants.HEADER_IF_NONE_EXIST, nextReqEntry.getRequest().getIfNoneExist());
}
if (isNotBlank(nextReqEntry.getRequest().getIfNoneMatch())) {
requestDetails.addHeader(Constants.HEADER_IF_NONE_MATCH, nextReqEntry.getRequest().getIfNoneMatch());
}
if (method instanceof BaseResourceReturningMethodBinding) {
try {
IBaseResource resource = ((BaseResourceReturningMethodBinding) method).doInvokeServer(theRequestDetails.getServer(), requestDetails);
if (paramValues.containsKey(Constants.PARAM_SUMMARY) || paramValues.containsKey(Constants.PARAM_CONTENT)) {
resource = filterNestedBundle(requestDetails, resource);
}
nextRespEntry.setResource((IResource) resource);
nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK));
} catch (NotModifiedException e) {
nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED));
}
} else {
throw new IllegalArgumentException("Unable to handle GET " + url);
}
}
for (Map.Entry<Entry, IBasePersistedResource> nextEntry : entriesToProcess.entrySet()) {
nextEntry.getKey().getResponse().setLocation(nextEntry.getValue().getIdDt().toUnqualified().getValue());
nextEntry.getKey().getResponse().setEtag(nextEntry.getValue().getIdDt().getVersionIdPart());
}
long delay = System.currentTimeMillis() - start;
int numEntries = theRequest.getEntry().size();
long delayPer = delay / numEntries;
ourLog.info("{} completed in {}ms ({} entries at {}ms per entry)", theActionName, delay, numEntries, delayPer);
response.setType(BundleTypeEnum.TRANSACTION_RESPONSE);
return response;
}
private void handleTransactionWriteOperations(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IdDt> theAllIds, Map<IdDt, IdDt> theIdSubstitutions, Map<IdDt, DaoMethodOutcome> theIdToPersistedOutcome, Bundle theResponse, IdentityHashMap<Entry, Integer> theOriginalRequestOrder, List<IIdType> theDeletedResources, DeleteConflictList theDeleteConflicts, Map<Entry, IBasePersistedResource> theEntriesToProcess, Set<IBasePersistedResource> theNonUpdatedEntities, Set<IBasePersistedResource> theUpdatedEntities) {
/*
* Loop through the request and process any entries of type
* PUT, POST or DELETE
*/
for (int i = 0; i < theRequest.getEntry().size(); i++) {
if (i % 100 == 0) {
ourLog.debug("Processed {} non-GET entries out of {}", i, theRequest.getEntry().size());
}
Entry nextReqEntry = theRequest.getEntry().get(i);
IResource res = nextReqEntry.getResource();
IdDt nextResourceId = null;
if (res != null) {
nextResourceId = res.getId();
if (!nextResourceId.hasIdPart()) {
if (isNotBlank(nextReqEntry.getFullUrl())) {
nextResourceId = new IdDt(nextReqEntry.getFullUrl());
}
}
if (nextResourceId.hasIdPart() && nextResourceId.getIdPart().matches("[a-zA-Z]+:.*") && !isPlaceholder(nextResourceId)) {
throw new InvalidRequestException("Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'");
}
if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) {
nextResourceId = new IdDt(toResourceName(res.getClass()), nextResourceId.getIdPart());
res.setId(nextResourceId);
}
/*
* Ensure that the bundle doesn't have any duplicates, since this causes all kinds of weirdness
*/
if (isPlaceholder(nextResourceId)) {
if (!theAllIds.add(nextResourceId)) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextResourceId));
}
} else if (nextResourceId.hasResourceType() && nextResourceId.hasIdPart()) {
IdDt nextId = nextResourceId.toUnqualifiedVersionless();
if (!theAllIds.add(nextId)) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextId));
}
}
}
HTTPVerbEnum verb = nextReqEntry.getRequest().getMethodElement().getValueAsEnum();
if (verb == null) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionEntryHasInvalidVerb", nextReqEntry.getRequest().getMethod()));
}
String resourceType = res != null ? getContext().getResourceType(res) : null;
Entry nextRespEntry = theResponse.getEntry().get(theOriginalRequestOrder.get(nextReqEntry));
switch (verb) {
case POST: {
// CREATE
@SuppressWarnings("rawtypes")
IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());
res.setId((String) null);
DaoMethodOutcome outcome;
outcome = resourceDao.create(res, nextReqEntry.getRequest().getIfNoneExist(), false, theTransactionDetails, theRequestDetails);
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res);
theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
if (outcome.getCreated() == false) {
theNonUpdatedEntities.add(outcome.getEntity());
}
break;
}
case DELETE: {
// DELETE
String url = extractTransactionUrlOrThrowException(nextReqEntry, verb);
UrlParts parts = UrlUtil.parseUrl(url);
IFhirResourceDao<? extends IBaseResource> dao = toDao(parts, verb.getCode(), url);
int status = Constants.STATUS_HTTP_204_NO_CONTENT;
if (parts.getResourceId() != null) {
DaoMethodOutcome outcome = dao.delete(new IdDt(parts.getResourceType(), parts.getResourceId()), theDeleteConflicts, theRequestDetails, theTransactionDetails);
if (outcome.getEntity() != null) {
theDeletedResources.add(outcome.getId().toUnqualifiedVersionless());
theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
}
} else {
DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(parts.getResourceType() + '?' + parts.getParams(), theDeleteConflicts, theRequestDetails);
List<ResourceTable> allDeleted = deleteOutcome.getDeletedEntities();
for (ResourceTable deleted : allDeleted) {
theDeletedResources.add(deleted.getIdDt().toUnqualifiedVersionless());
}
if (allDeleted.isEmpty()) {
status = Constants.STATUS_HTTP_404_NOT_FOUND;
}
}
nextRespEntry.getResponse().setStatus(toStatusString(status));
break;
}
case PUT: {
// UPDATE
@SuppressWarnings("rawtypes")
IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());
DaoMethodOutcome outcome;
String url = extractTransactionUrlOrThrowException(nextReqEntry, verb);
UrlParts parts = UrlUtil.parseUrl(url);
if (isNotBlank(parts.getResourceId())) {
res.setId(new IdDt(parts.getResourceType(), parts.getResourceId()));
outcome = resourceDao.update(res, null, false, theRequestDetails);
} else {
res.setId((String) null);
outcome = resourceDao.update(res, parts.getResourceType() + '?' + parts.getParams(), false, theRequestDetails);
}
if (outcome.getCreated() == Boolean.FALSE) {
theUpdatedEntities.add(outcome.getEntity());
}
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res);
theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
break;
}
case GET:
break;
}
}
/*
* Make sure that there are no conflicts from deletions. E.g. we can't delete something
* if something else has a reference to it, unless the thing that has a reference to it
* was also deleted as a part of this transaction, which is why we check this now at the
* end.
*/
theDeleteConflicts.removeIf(next -> theDeletedResources.contains(next.getTargetId().toVersionless()));
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), theDeleteConflicts);
/*
* Perform ID substitutions and then index each resource we have saved
*/
FhirTerser terser = getContext().newTerser();
for (DaoMethodOutcome nextOutcome : theIdToPersistedOutcome.values()) {
IResource nextResource = (IResource) nextOutcome.getResource();
if (nextResource == null) {
continue;
}
// References
List<BaseResourceReferenceDt> allRefs = terser.getAllPopulatedChildElementsOfType(nextResource, BaseResourceReferenceDt.class);
for (BaseResourceReferenceDt nextRef : allRefs) {
IdDt nextId = nextRef.getReference();
if (!nextId.hasIdPart()) {
continue;
}
if (theIdSubstitutions.containsKey(nextId)) {
IdDt newId = theIdSubstitutions.get(nextId);
ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId);
nextRef.setReference(newId);
} else {
ourLog.debug(" * Reference [{}] does not exist in bundle", nextId);
}
}
// URIs
List<UriDt> allUris = terser.getAllPopulatedChildElementsOfType(nextResource, UriDt.class);
for (UriDt nextRef : allUris) {
if (nextRef instanceof IIdType) {
continue; // No substitution on the resource ID itself!
}
IdDt nextUriString = new IdDt(nextRef.getValueAsString());
if (theIdSubstitutions.containsKey(nextUriString)) {
IdDt newId = theIdSubstitutions.get(nextUriString);
ourLog.debug(" * Replacing resource ref {} with {}", nextUriString, newId);
nextRef.setValue(newId.getValue());
} else {
ourLog.debug(" * Reference [{}] does not exist in bundle", nextUriString);
}
}
InstantDt deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get(nextResource);
Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null;
if (theUpdatedEntities.contains(nextOutcome.getEntity())) {
updateInternal(theRequestDetails, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
} else if (!theNonUpdatedEntities.contains(nextOutcome.getEntity())) {
updateEntity(theRequestDetails, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true);
}
}
myEntityManager.flush();
/*
* Double check we didn't allow any duplicates we shouldn't have
*/
for (Entry nextEntry : theRequest.getEntry()) {
if (nextEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.POST) {
String matchUrl = nextEntry.getRequest().getIfNoneExist();
if (isNotBlank(matchUrl)) {
Class<? extends IBaseResource> resType = nextEntry.getResource().getClass();
Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequestDetails);
if (val.size() > 1) {
throw new InvalidRequestException(
"Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
}
}
}
}
for (IdDt next : theAllIds) {
IdDt replacement = theIdSubstitutions.get(next);
if (replacement == null) {
continue;
}
if (replacement.equals(next)) {
continue;
}
ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement);
}
}
private String extractTransactionUrlOrThrowException(Entry nextEntry, HTTPVerbEnum verb) {
String url = nextEntry.getRequest().getUrl();
if (isBlank(url)) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionMissingUrl", verb.name()));
}
return url;
}
/**
* This method is called for nested bundles (e.g. if we received a transaction with an entry that
* was a GET search, this method is called on the bundle for the search result, which will be placed in the
* outer bundle). This method applies the _summary and _content parameters to the output of
* that bundle.
* <p>
* TODO: This isn't the most efficient way of doing this; hopefully we can come up with something better in the future.
*/
private IBaseResource filterNestedBundle(RequestDetails theRequestDetails, IBaseResource theResource) {
IParser p = getContext().newJsonParser();
RestfulServerUtils.configureResponseParser(theRequestDetails, p);
return p.parseResource(theResource.getClass(), p.encodeResourceToString(theResource));
}
@Override @Override
public MetaDt metaGetOperation(RequestDetails theRequestDetails) { public MetaDt metaGetOperation(RequestDetails theRequestDetails) {
@ -589,31 +49,6 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
return retVal; return retVal;
} }
private IFhirResourceDao<? extends IBaseResource> toDao(UrlParts theParts, String theVerb, String theUrl) {
RuntimeResourceDefinition resType;
try {
resType = getContext().getResourceDefinition(theParts.getResourceType());
} catch (DataFormatException e) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
throw new InvalidRequestException(msg);
}
IFhirResourceDao<? extends IBaseResource> dao = null;
if (resType != null) {
dao = this.myDaoRegistry.getResourceDaoOrNull(resType.getImplementingClass());
}
if (dao == null) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
throw new InvalidRequestException(msg);
}
// if (theParts.getResourceId() == null && theParts.getParams() == null) {
// String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
// throw new InvalidRequestException(msg);
// }
return dao;
}
protected MetaDt toMetaDt(Collection<TagDefinition> tagDefinitions) { protected MetaDt toMetaDt(Collection<TagDefinition> tagDefinitions) {
MetaDt retVal = new MetaDt(); MetaDt retVal = new MetaDt();
for (TagDefinition next : tagDefinitions) { for (TagDefinition next : tagDefinitions) {
@ -632,105 +67,9 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
return retVal; return retVal;
} }
@Transactional(propagation = Propagation.NEVER)
@Override
public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
if (theRequestDetails != null) {
ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null);
notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails);
}
String actionName = "Transaction";
return transaction((ServletRequestDetails) theRequestDetails, theRequest, actionName);
}
private Bundle transaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) {
markRequestAsProcessingSubRequest(theRequestDetails);
try {
return doTransaction(theRequestDetails, theRequest, theActionName);
} finally {
clearRequestAsProcessingSubRequest(theRequestDetails);
}
}
private static void handleTransactionCreateOrUpdateOutcome(Map<IdDt, IdDt> idSubstitutions, Map<IdDt, DaoMethodOutcome> idToPersistedOutcome, IdDt nextResourceId, DaoMethodOutcome outcome,
Entry newEntry, String theResourceType, IResource theRes) {
IdDt newId = (IdDt) outcome.getId().toUnqualifiedVersionless();
IdDt resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless();
if (newId.equals(resourceId) == false) {
idSubstitutions.put(resourceId, newId);
if (isPlaceholder(resourceId)) {
/*
* Substitution IDs should have no resource type, but we also accept the qualified form to be lenient.
*/
idSubstitutions.put(new IdDt(theResourceType + '/' + resourceId.getValue()), newId);
}
}
idToPersistedOutcome.put(newId, outcome);
if (outcome.getCreated().booleanValue()) {
newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_201_CREATED));
} else {
newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK));
}
newEntry.getResponse().setLastModified(ResourceMetadataKeyEnum.UPDATED.get(theRes));
}
private static boolean isPlaceholder(IdDt theId) {
if (theId.getValue() != null) {
return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:");
}
return false;
}
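The placeholder handling above is what allows intra-bundle references: an entry may carry a urn:uuid: or urn:oid: fullUrl, other entries may refer to it, and the substitution map rewrites those references once real IDs are assigned. A hedged DSTU2 sketch of a bundle that relies on this behaviour (the UUID value is arbitrary):

import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;

public class PlaceholderIdExample {
	// The Observation refers to the Patient by its placeholder; after the Patient is
	// stored, the substitution map rewrites the reference to the server-assigned ID.
	public static Bundle buildTransactionWithPlaceholder() {
		String placeholder = "urn:uuid:3c1b7b54-8d57-4f4e-9f8e-123456789abc";

		Patient patient = new Patient();
		Observation observation = new Observation();
		observation.getSubject().setReference(placeholder);

		Bundle bundle = new Bundle();
		bundle.setType(BundleTypeEnum.TRANSACTION);
		bundle.addEntry().setFullUrl(placeholder).setResource(patient)
			.getRequest().setMethod(HTTPVerbEnum.POST).setUrl("Patient");
		bundle.addEntry().setResource(observation)
			.getRequest().setMethod(HTTPVerbEnum.POST).setUrl("Observation");
		return bundle;
	}
}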
private static String toStatusString(int theStatusCode) {
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
}
@Override @Override
public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) { public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
} }
/**
* Transaction Order, per the spec:
* <p>
* Process any DELETE interactions
* Process any POST interactions
* Process any PUT interactions
* Process any GET interactions
*/
public class TransactionSorter implements Comparator<Entry> {
@Override
public int compare(Entry theO1, Entry theO2) {
int o1 = toOrder(theO1);
int o2 = toOrder(theO2);
return o1 - o2;
}
private int toOrder(Entry theO1) {
int o1 = 0;
if (theO1.getRequest().getMethodElement().getValueAsEnum() != null) {
switch (theO1.getRequest().getMethodElement().getValueAsEnum()) {
case DELETE:
o1 = 1;
break;
case POST:
o1 = 2;
break;
case PUT:
o1 = 3;
break;
case GET:
o1 = 4;
break;
}
}
return o1;
}
}
} }


@ -0,0 +1,171 @@
package ca.uhn.fhir.jpa.dao;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Date;
import java.util.List;
public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
@Override
public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) {
theBundleEntry.getResponse().setStatus(theStatus);
}
@Override
public void setResponseLastModified(Bundle.Entry theBundleEntry, Date theLastModified) {
theBundleEntry.getResponse().setLastModified(theLastModified, TemporalPrecisionEnum.MILLI);
}
@Override
public void setResource(Bundle.Entry theBundleEntry, IBaseResource theResource) {
theBundleEntry.setResource((IResource) theResource);
}
@Override
public IBaseResource getResource(Bundle.Entry theBundleEntry) {
return theBundleEntry.getResource();
}
@Override
public String getBundleType(Bundle theRequest) {
if (theRequest.getType() == null) {
return null;
}
return theRequest.getTypeElement().getValue();
}
@Override
public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) {
OperationOutcome oo = new OperationOutcome();
oo.addIssue()
.setSeverity(IssueSeverityEnum.ERROR)
.setDiagnostics(theCaughtEx.getMessage())
.setCode(IssueTypeEnum.EXCEPTION);
theEntry.setResource(oo);
}
@Override
public Bundle createBundle(String theBundleType) {
Bundle resp = new Bundle();
try {
resp.setType(BundleTypeEnum.forCode(theBundleType));
} catch (FHIRException theE) {
throw new InternalErrorException("Unknown bundle type: " + theBundleType);
}
return resp;
}
@Override
public List<Bundle.Entry> getEntries(Bundle theRequest) {
return theRequest.getEntry();
}
@Override
public void addEntry(Bundle theBundle, Bundle.Entry theEntry) {
theBundle.addEntry(theEntry);
}
@Override
public Bundle.Entry addEntry(Bundle theBundle) {
return theBundle.addEntry();
}
@Override
public String getEntryRequestVerb(FhirContext theContext, Bundle.Entry theEntry) {
String retVal = null;
HTTPVerbEnum value = theEntry.getRequest().getMethodElement().getValueAsEnum();
if (value != null) {
retVal = value.getCode();
}
return retVal;
}
@Override
public String getFullUrl(Bundle.Entry theEntry) {
return theEntry.getFullUrl();
}
@Override
public String getEntryIfNoneExist(Bundle.Entry theEntry) {
return theEntry.getRequest().getIfNoneExist();
}
@Override
public String getEntryRequestUrl(Bundle.Entry theEntry) {
return theEntry.getRequest().getUrl();
}
@Override
public void setResponseLocation(Bundle.Entry theEntry, String theResponseLocation) {
theEntry.getResponse().setLocation(theResponseLocation);
}
@Override
public void setResponseETag(Bundle.Entry theEntry, String theEtag) {
theEntry.getResponse().setEtag(theEtag);
}
@Override
public String getEntryRequestIfMatch(Bundle.Entry theEntry) {
return theEntry.getRequest().getIfMatch();
}
@Override
public String getEntryRequestIfNoneExist(Bundle.Entry theEntry) {
return theEntry.getRequest().getIfNoneExist();
}
@Override
public String getEntryRequestIfNoneMatch(Bundle.Entry theEntry) {
return theEntry.getRequest().getIfNoneMatch();
}
@Override
public void setResponseOutcome(Bundle.Entry theEntry, IBaseOperationOutcome theOperationOutcome) {
theEntry.setResource((IResource) theOperationOutcome);
}
@Override
public void setRequestVerb(Bundle.Entry theEntry, String theVerb) {
theEntry.getRequest().setMethod(HTTPVerbEnum.forCode(theVerb));
}
@Override
public void setRequestUrl(Bundle.Entry theEntry, String theUrl) {
theEntry.getRequest().setUrl(theUrl);
}
}
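This adapter is the DSTU2 half of the version-independence scheme: the shared transaction processor only talks to the ITransactionProcessorVersionAdapter interface, and each FHIR version contributes an implementation that knows its own Bundle and entry classes. A small sketch exercising the adapter directly on a caller-supplied bundle:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2;
import ca.uhn.fhir.model.dstu2.resource.Bundle;

public class VersionAdapterExample {
	// Reads each entry's HTTP verb and URL through the adapter rather than the
	// DSTU2 classes directly, which is how the shared processor stays version-agnostic.
	public static void describeEntries(Bundle theRequest) {
		FhirContext ctx = FhirContext.forDstu2();
		TransactionProcessorVersionAdapterDstu2 adapter = new TransactionProcessorVersionAdapterDstu2();
		for (Bundle.Entry entry : adapter.getEntries(theRequest)) {
			System.out.println(adapter.getEntryRequestVerb(ctx, entry) + " " + adapter.getEntryRequestUrl(entry));
		}
	}
}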


@ -1,6 +1,6 @@
package ca.uhn.fhir.jpa.dao.data; package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice; import org.springframework.data.domain.Slice;
@ -38,13 +38,13 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Lo
Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid); Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid);
@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status") @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status")
Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted); Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted);
@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff") @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff); Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);
@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC") @Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus); Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus);
@Modifying @Modifying
@Query("DELETE FROM BulkExportJobEntity t") @Query("DELETE FROM BulkExportJobEntity t")


@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Optional;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IBulkImportJobDao extends JpaRepository<BulkImportJobEntity, Long> {
@Query("SELECT j FROM BulkImportJobEntity j WHERE j.myJobId = :jobid")
Optional<BulkImportJobEntity> findByJobId(@Param("jobid") String theUuid);
@Query("SELECT j FROM BulkImportJobEntity j WHERE j.myStatus = :status")
Slice<BulkImportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkImportJobStatusEnum theStatus);
}
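A typical consumer of this repository is a scheduled activator that picks up the next job in a given state. A sketch, assuming a READY constant exists on BulkImportJobStatusEnum (the enum's values are defined elsewhere and not shown in this file):

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;

import java.util.Optional;

public class BulkImportJobPollExample {
	// Fetches at most one job in the given state; returns empty when nothing is queued.
	public static Optional<BulkImportJobEntity> findNextReadyJob(IBulkImportJobDao theJobDao) {
		// READY is assumed here; check BulkImportJobStatusEnum for the actual constants.
		Slice<BulkImportJobEntity> slice = theJobDao.findByStatus(PageRequest.of(0, 1), BulkImportJobStatusEnum.READY);
		return slice.hasContent() ? Optional.of(slice.getContent().get(0)) : Optional.empty();
	}
}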


@ -0,0 +1,43 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
import java.util.Optional;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IBulkImportJobFileDao extends JpaRepository<BulkImportJobFileEntity, Long> {
@Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
List<BulkImportJobFileEntity> findAllForJob(@Param("jobId") String theJobId);
@Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex")
Optional<BulkImportJobFileEntity> findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex);
@Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
List<Long> findAllIdsForJob(@Param("jobId") String theJobId);
}
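Because files are keyed by myFileSequence, a reader can walk a job's files one index at a time instead of loading everything at once. A sketch, assuming the job entity was already loaded through IBulkImportJobDao:

import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;

public class BulkImportFileWalkExample {
	// Resolves each file by its sequence index so large jobs never need all files in memory.
	public static void walkFiles(IBulkImportJobFileDao theFileDao, BulkImportJobEntity theJob) {
		for (int i = 0; i < theJob.getFileCount(); i++) {
			BulkImportJobFileEntity file = theFileDao.findForJob(theJob, i)
				.orElseThrow(() -> new IllegalStateException("Missing file for job " + theJob.getJobId()));
			// Hand "file" off to whatever parses and stores its contents.
		}
	}
}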


@ -22,18 +22,13 @@ package ca.uhn.fhir.jpa.dao.dstu3;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.dstu3.model.Bundle; import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Meta; import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct; import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
@ -42,14 +37,10 @@ import java.util.List;
public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> { public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
@Autowired
private TransactionProcessor myTransactionProcessor;
@Override @Override
@PostConstruct @PostConstruct
public void start() { public void start() {
super.start(); super.start();
myTransactionProcessor.setDao(this);
} }
@Override @Override
@ -88,12 +79,5 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented(); return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
} }
@Transactional(propagation = Propagation.NEVER)
@Override
public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
return myTransactionProcessor.transaction(theRequestDetails, theRequest);
}
} }


@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams; import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.entity.PartitionEntity; import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude; import ca.uhn.fhir.jpa.entity.SearchInclude;
@ -123,6 +125,8 @@ public class ExpungeEverythingService {
counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class)); counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class));
counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class)); counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class));
counter.addAndGet(expungeEverythingByType(SearchParamPresent.class)); counter.addAndGet(expungeEverythingByType(SearchParamPresent.class));
counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class));
counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class));
counter.addAndGet(expungeEverythingByType(ForcedId.class)); counter.addAndGet(expungeEverythingByType(ForcedId.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class));
counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class)); counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class));


@ -22,42 +22,20 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta; import org.hl7.fhir.r4.model.Meta;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> { public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> {
@Autowired
private TransactionProcessor myTransactionProcessor;
@VisibleForTesting
public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
myTransactionProcessor = theTransactionProcessor;
}
@Override
@PostConstruct
public void start() {
super.start();
myTransactionProcessor.setDao(this);
}
@Override @Override
public Meta metaGetOperation(RequestDetails theRequestDetails) { public Meta metaGetOperation(RequestDetails theRequestDetails) {
// Notify interceptors // Notify interceptors
@ -95,10 +73,4 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> {
return retVal; return retVal;
} }
@Transactional(propagation = Propagation.NEVER)
@Override
public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
return myTransactionProcessor.transaction(theRequestDetails, theRequest);
}
} }


@ -22,20 +22,14 @@ package ca.uhn.fhir.jpa.dao.r5;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2; import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition; import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails; import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle; import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r5.model.Meta; import org.hl7.fhir.r5.model.Meta;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery; import javax.persistence.TypedQuery;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
@ -44,17 +38,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao<Bundle, Meta> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class); private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class);
@Autowired
private TransactionProcessor myTransactionProcessor;
@Override
@PostConstruct
public void start() {
super.start();
myTransactionProcessor.setDao(this);
}
@Override @Override
public Meta metaGetOperation(RequestDetails theRequestDetails) { public Meta metaGetOperation(RequestDetails theRequestDetails) {
// Notify interceptors // Notify interceptors
@ -92,10 +75,5 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao<Bundle, Meta> {
return retVal; return retVal;
} }
@Transactional(propagation = Propagation.NEVER)
@Override
public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
return myTransactionProcessor.transaction(theRequestDetails, theRequest);
}
} }


@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity;
* #L% * #L%
*/ */
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType; import org.hl7.fhir.r5.model.InstantType;
@ -70,7 +70,7 @@ public class BulkExportJobEntity implements Serializable {
@Enumerated(EnumType.STRING) @Enumerated(EnumType.STRING)
@Column(name = "JOB_STATUS", length = 10, nullable = false) @Column(name = "JOB_STATUS", length = 10, nullable = false)
private BulkJobStatusEnum myStatus; private BulkExportJobStatusEnum myStatus;
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
@Column(name = "CREATED_TIME", nullable = false) @Column(name = "CREATED_TIME", nullable = false)
private Date myCreated; private Date myCreated;
@ -156,11 +156,11 @@ public class BulkExportJobEntity implements Serializable {
return b.toString(); return b.toString();
} }
public BulkJobStatusEnum getStatus() { public BulkExportJobStatusEnum getStatus() {
return myStatus; return myStatus;
} }
public void setStatus(BulkJobStatusEnum theStatus) { public void setStatus(BulkExportJobStatusEnum theStatus) {
if (myStatus != theStatus) { if (myStatus != theStatus) {
myStatusTime = new Date(); myStatusTime = new Date();
myStatus = theStatus; myStatus = theStatus;


@ -0,0 +1,157 @@
package ca.uhn.fhir.jpa.entity;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import java.io.Serializable;
import java.util.Date;
import static org.apache.commons.lang3.StringUtils.left;
@Entity
@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = {
@UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")
})
public class BulkImportJobEntity implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID")
@SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID")
@Column(name = "PID")
private Long myId;
@Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false)
private String myJobId;
@Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
private String myJobDescription;
@Enumerated(EnumType.STRING)
@Column(name = "JOB_STATUS", length = 10, nullable = false)
private BulkImportJobStatusEnum myStatus;
@Version
@Column(name = "OPTLOCK", nullable = false)
private int myVersion;
@Column(name = "FILE_COUNT", nullable = false)
private int myFileCount;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "STATUS_TIME", nullable = false)
private Date myStatusTime;
@Column(name = "STATUS_MESSAGE", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
private String myStatusMessage;
@Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false)
@Enumerated(EnumType.STRING)
private JobFileRowProcessingModeEnum myRowProcessingMode;
@Column(name = "BATCH_SIZE", nullable = false, updatable = false)
private int myBatchSize;
public String getJobDescription() {
return myJobDescription;
}
public void setJobDescription(String theJobDescription) {
myJobDescription = left(theJobDescription, BulkExportJobEntity.STATUS_MESSAGE_LEN);
}
public JobFileRowProcessingModeEnum getRowProcessingMode() {
return myRowProcessingMode;
}
public void setRowProcessingMode(JobFileRowProcessingModeEnum theRowProcessingMode) {
myRowProcessingMode = theRowProcessingMode;
}
public Date getStatusTime() {
return myStatusTime;
}
public void setStatusTime(Date theStatusTime) {
myStatusTime = theStatusTime;
}
public int getFileCount() {
return myFileCount;
}
public void setFileCount(int theFileCount) {
myFileCount = theFileCount;
}
public String getJobId() {
return myJobId;
}
public void setJobId(String theJobId) {
myJobId = theJobId;
}
public BulkImportJobStatusEnum getStatus() {
return myStatus;
}
/**
* Sets the status, updates the status time, and clears the status message
*/
public void setStatus(BulkImportJobStatusEnum theStatus) {
if (myStatus != theStatus) {
myStatus = theStatus;
setStatusTime(new Date());
setStatusMessage(null);
}
}
public String getStatusMessage() {
return myStatusMessage;
}
public void setStatusMessage(String theStatusMessage) {
myStatusMessage = left(theStatusMessage, BulkExportJobEntity.STATUS_MESSAGE_LEN);
}
public BulkImportJobJson toJson() {
return new BulkImportJobJson()
.setProcessingMode(getRowProcessingMode())
.setFileCount(getFileCount())
.setJobDescription(getJobDescription());
}
public int getBatchSize() {
return myBatchSize;
}
public void setBatchSize(int theBatchSize) {
myBatchSize = theBatchSize;
}
}
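
For orientation, a minimal sketch of how the entity's status handling and JSON mapping above are meant to be used (sketch only, not part of the diff; it relies solely on the accessors shown above and assumes a java.util.UUID import):

// Sketch only, using the enums and accessors defined in BulkImportJobEntity above.
BulkImportJobEntity entity = new BulkImportJobEntity();
entity.setJobId(UUID.randomUUID().toString());
entity.setJobDescription("Nightly patient load");
entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
entity.setFileCount(2);
entity.setBatchSize(10);
// setStatus() stamps STATUS_TIME and clears STATUS_MESSAGE, but only when the status actually changes.
entity.setStatus(BulkImportJobStatusEnum.STAGING);
// toJson() carries the processing mode, file count and description into the API-facing model.
BulkImportJobJson json = entity.toJson();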

View File

@ -0,0 +1,104 @@
package ca.uhn.fhir.jpa.entity;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
@Entity
@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = {
@Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID")
})
public class BulkImportJobFileEntity implements Serializable {
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID")
@SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID")
@Column(name = "PID")
private Long myId;
@ManyToOne
@JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB"))
private BulkImportJobEntity myJob;
@Column(name = "FILE_SEQ", nullable = false)
private int myFileSequence;
@Lob
@Column(name = "JOB_CONTENTS", nullable = false)
private byte[] myContents;
@Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH)
private String myTenantName;
public BulkImportJobEntity getJob() {
return myJob;
}
public void setJob(BulkImportJobEntity theJob) {
myJob = theJob;
}
public int getFileSequence() {
return myFileSequence;
}
public void setFileSequence(int theFileSequence) {
myFileSequence = theFileSequence;
}
public String getContents() {
return new String(myContents, StandardCharsets.UTF_8);
}
public void setContents(String theContents) {
myContents = theContents.getBytes(StandardCharsets.UTF_8);
}
public BulkImportJobFileJson toJson() {
return new BulkImportJobFileJson()
.setContents(getContents())
.setTenantName(getTenantName());
}
public void setTenantName(String theTenantName) {
myTenantName = theTenantName;
}
public String getTenantName() {
return myTenantName;
}
}
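
A short sketch of the UTF-8 round trip performed by the contents accessors above (illustrative only, not part of the diff; it uses only the methods shown in BulkImportJobFileEntity):

// Sketch only: contents are persisted as a UTF-8 LOB and decoded back to a String on read.
BulkImportJobFileEntity fileEntity = new BulkImportJobFileEntity();
fileEntity.setFileSequence(0);
fileEntity.setTenantName("TENANT0");
fileEntity.setContents("{\"resourceType\":\"Bundle\",\"type\":\"transaction\"}\n");
// toJson() hands the decoded contents and the tenant name to the batch reader.
BulkImportJobFileJson fileJson = fileEntity.toJson();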

View File

@ -0,0 +1,58 @@
package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.fail;
public class BaseBatchJobR4Test extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class);
@Autowired
private JobExplorer myJobExplorer;
protected List<JobExecution> awaitAllBulkJobCompletions(String... theJobNames) {
assert theJobNames.length > 0;
List<JobInstance> bulkExport = new ArrayList<>();
for (String nextName : theJobNames) {
bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100));
}
if (bulkExport.isEmpty()) {
List<String> wantNames = Arrays.asList(theJobNames);
List<String> haveNames = myJobExplorer.getJobNames();
fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames);
}
List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
awaitJobCompletions(bulkExportExecutions);
return bulkExportExecutions;
}
protected void awaitJobCompletions(Collection<JobExecution> theJobs) {
theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
}
protected void awaitJobCompletion(JobExecution theJobExecution) {
await().atMost(120, TimeUnit.SECONDS).until(() -> {
JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions());
return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
});
}
}
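
A subclass typically submits its work and then blocks on the helper above. A sketch (not part of the commit; MyBulkJobTest is a hypothetical name, and the job-name constant comes from BatchJobsConfig as used later in this change):

// Sketch only: subclass submits a job, then waits for every execution of the named Spring Batch job.
public class MyBulkJobTest extends BaseBatchJobR4Test {
	@Test
	public void testJobRunsToCompletion() {
		// ... submit a bulk job here ...
		List<JobExecution> executions = awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME);
		assertEquals(1, executions.size());
	}
}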

View File

@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider; import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity; import ca.uhn.fhir.rest.client.apache.ResourceEntity;
@ -188,7 +188,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID) .setJobId(A_JOB_ID)
.setStatus(BulkJobStatusEnum.BUILDING) .setStatus(BulkExportJobStatusEnum.BUILDING)
.setStatusTime(InstantType.now().getValue()); .setStatusTime(InstantType.now().getValue());
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
@ -212,7 +212,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID) .setJobId(A_JOB_ID)
.setStatus(BulkJobStatusEnum.ERROR) .setStatus(BulkExportJobStatusEnum.ERROR)
.setStatusTime(InstantType.now().getValue()) .setStatusTime(InstantType.now().getValue())
.setStatusMessage("Some Error Message"); .setStatusMessage("Some Error Message");
when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo); when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
@ -239,7 +239,7 @@ public class BulkDataExportProviderTest {
IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo() IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
.setJobId(A_JOB_ID) .setJobId(A_JOB_ID)
.setStatus(BulkJobStatusEnum.COMPLETE) .setStatus(BulkExportJobStatusEnum.COMPLETE)
.setStatusTime(InstantType.now().getValue()); .setStatusTime(InstantType.now().getValue());
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111")); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111"));
jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222")); jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222"));

View File

@ -6,15 +6,14 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig; import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter; import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions; import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.job.BulkExportJobParametersBuilder; import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobParametersBuilder;
import ca.uhn.fhir.jpa.bulk.job.GroupBulkExportJobParametersBuilder; import ca.uhn.fhir.jpa.bulk.export.job.GroupBulkExportJobParametersBuilder;
import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum; import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao; import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity; import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
@ -46,28 +45,22 @@ import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.Job; import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException; import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Qualifier;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection;
import java.util.Date; import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
@ -78,7 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assertions.fail;
public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test { public class BulkDataExportSvcImplR4Test extends BaseBatchJobR4Test {
public static final String TEST_FILTER = "Patient?gender=female"; public static final String TEST_FILTER = "Patient?gender=female";
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class); private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportSvcImplR4Test.class);
@ -92,8 +85,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private IBulkDataExportSvc myBulkDataExportSvc; private IBulkDataExportSvc myBulkDataExportSvc;
@Autowired @Autowired
private IBatchJobSubmitter myBatchJobSubmitter; private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
private JobExplorer myJobExplorer;
@Autowired @Autowired
@Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME) @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
@ -128,7 +119,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue(); String binaryId = myBinaryDao.create(b).getId().toUnqualifiedVersionless().getValue();
BulkExportJobEntity job = new BulkExportJobEntity(); BulkExportJobEntity job = new BulkExportJobEntity();
job.setStatus(BulkJobStatusEnum.COMPLETE); job.setStatus(BulkExportJobStatusEnum.COMPLETE);
job.setExpiry(DateUtils.addHours(new Date(), -1)); job.setExpiry(DateUtils.addHours(new Date(), -1));
job.setJobId(UUID.randomUUID().toString()); job.setJobId(UUID.randomUUID().toString());
job.setCreated(new Date()); job.setCreated(new Date());
@ -241,6 +232,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
return options; return options;
} }
@Test @Test
public void testSubmit_ReusesExisting() { public void testSubmit_ReusesExisting() {
@ -278,7 +270,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status // Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
myBulkDataExportSvc.buildExportFiles(); myBulkDataExportSvc.buildExportFiles();
@ -287,7 +279,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again // Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.ERROR, status.getStatus()); assertEquals(BulkExportJobStatusEnum.ERROR, status.getStatus());
assertThat(status.getStatusMessage(), containsString("help i'm a bug")); assertThat(status.getStatusMessage(), containsString("help i'm a bug"));
} finally { } finally {
@ -295,6 +287,14 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} }
} }
private void awaitAllBulkJobCompletions() {
awaitAllBulkJobCompletions(
BatchJobsConfig.BULK_EXPORT_JOB_NAME,
BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME,
BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME
);
}
@Test @Test
public void testGenerateBulkExport_SpecificResources() { public void testGenerateBulkExport_SpecificResources() {
@ -313,7 +313,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status // Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_FILTER), status.getRequest()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_typeFilter=" + UrlUtil.escapeUrlParam(TEST_FILTER), status.getRequest());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
@ -323,7 +323,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again // Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
// Iterate over the files // Iterate over the files
for (IBulkDataExportSvc.FileEntry next : status.getFiles()) { for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
@ -368,7 +368,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status // Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson", status.getRequest()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson", status.getRequest());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
@ -378,7 +378,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again // Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(5, status.getFiles().size()); assertEquals(5, status.getFiles().size());
// Iterate over the files // Iterate over the files
@ -393,7 +393,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} else if ("Observation".equals(next.getResourceType())) { } else if ("Observation".equals(next.getResourceType())) {
assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n")); assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n"));
assertEquals(26, nextContents.split("\n").length); assertEquals(26, nextContents.split("\n").length);
}else if ("Immunization".equals(next.getResourceType())) { } else if ("Immunization".equals(next.getResourceType())) {
assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n")); assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n"));
assertEquals(26, nextContents.split("\n").length); assertEquals(26, nextContents.split("\n").length);
} else if ("CareTeam".equals(next.getResourceType())) { } else if ("CareTeam".equals(next.getResourceType())) {
@ -428,7 +428,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(5)); assertThat(jobInfo.getFiles().size(), equalTo(5));
} }
@ -451,7 +451,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status // Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient&_typeFilter=Patient%3F_has%3AObservation%3Apatient%3Aidentifier%3DSYS%7CVAL3", status.getRequest()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Patient&_typeFilter=Patient%3F_has%3AObservation%3Apatient%3Aidentifier%3DSYS%7CVAL3", status.getRequest());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
@ -461,7 +461,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again // Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(1, status.getFiles().size()); assertEquals(1, status.getFiles().size());
// Iterate over the files // Iterate over the files
@ -481,7 +481,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} }
@Test @Test
public void testGenerateBulkExport_WithSince() throws InterruptedException { public void testGenerateBulkExport_WithSince() {
// Create some resources to load // Create some resources to load
createResources(); createResources();
@ -508,7 +508,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Check the status // Check the status
IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.SUBMITTED, status.getStatus()); assertEquals(BulkExportJobStatusEnum.SUBMITTED, status.getStatus());
assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest()); assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Patient&_since=" + cutoff.setTimeZoneZulu(true).getValueAsString(), status.getRequest());
// Run a scheduled pass to build the export // Run a scheduled pass to build the export
@ -518,7 +518,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// Fetch the job again // Fetch the job again
status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus()); assertEquals(BulkExportJobStatusEnum.COMPLETE, status.getStatus());
assertEquals(1, status.getFiles().size()); assertEquals(1, status.getFiles().size());
// Iterate over the files // Iterate over the files
@ -560,24 +560,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID"); String jobUUID = (String) jobExecution.getExecutionContext().get("jobUUID");
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobUUID);
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().size(), equalTo(2));
} }
public void awaitAllBulkJobCompletions() {
List<JobInstance> bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100);
bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME, 0, 100));
bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100));
if (bulkExport.isEmpty()) {
fail("There are no bulk export jobs running!");
}
List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
awaitJobCompletions(bulkExportExecutions);
}
public void awaitJobCompletions(Collection<JobExecution> theJobs) {
theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
}
@Test @Test
public void testBatchJobSubmitsAndRuns() throws Exception { public void testBatchJobSubmitsAndRuns() throws Exception {
@ -599,13 +585,13 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitJobCompletion(jobExecution); awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().size(), equalTo(2));
} }
@Test @Test
public void testGroupBatchJobWorks() throws Exception { public void testGroupBatchJobWorks() {
createResources(); createResources();
// Create a bulk job // Create a bulk job
@ -625,7 +611,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -639,8 +625,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
assertThat(nextContents, is(containsString("IMM6"))); assertThat(nextContents, is(containsString("IMM6")));
assertThat(nextContents, is(containsString("IMM8"))); assertThat(nextContents, is(containsString("IMM8")));
} }
@Test @Test
public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() throws Exception { public void testGroupBatchJobMdmExpansionIdentifiesGoldenResources() {
createResources(); createResources();
// Create a bulk job // Create a bulk job
@ -659,7 +646,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -716,7 +703,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitJobCompletion(jobExecution); awaitJobCompletion(jobExecution);
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -747,7 +734,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
// CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null // CareTeam has two patient references: participant and patient. This test checks if we find the patient if participant is null but patient is not null
@Test @Test
public void testGroupBatchJobCareTeam() throws Exception { public void testGroupBatchJobCareTeam() {
createResources(); createResources();
BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions(); BulkDataExportOptions bulkDataExportOptions = new BulkDataExportOptions();
@ -766,7 +753,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("CareTeam")));
@ -810,7 +797,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -847,7 +834,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Observation")));
String nextContents = getBinaryContents(jobInfo, 0); String nextContents = getBinaryContents(jobInfo, 0);
@ -888,7 +875,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitAllBulkJobCompletions(); awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Patient")));
@ -900,7 +887,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} }
@Test @Test
public void testMdmExpansionWorksForGroupExportOnMatchedPatients() throws JobParametersInvalidException { public void testMdmExpansionWorksForGroupExportOnMatchedPatients() {
createResources(); createResources();
// Create a bulk job // Create a bulk job
@ -918,9 +905,9 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
awaitAllBulkJobCompletions(); awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest()); assertEquals("/Group/G0/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId + "&_mdm=true", jobInfo.getRequest());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(2)); assertThat(jobInfo.getFiles().size(), equalTo(2));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -963,7 +950,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} }
@Test @Test
public void testGroupBulkExportSupportsTypeFilters() throws JobParametersInvalidException { public void testGroupBulkExportSupportsTypeFilters() {
createResources(); createResources();
//Only get COVID-19 vaccinations //Only get COVID-19 vaccinations
@ -985,7 +972,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE)); assertThat(jobInfo.getStatus(), equalTo(BulkExportJobStatusEnum.COMPLETE));
assertThat(jobInfo.getFiles().size(), equalTo(1)); assertThat(jobInfo.getFiles().size(), equalTo(1));
assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization"))); assertThat(jobInfo.getFiles().get(0).getResourceType(), is(equalTo("Immunization")));
@ -1021,7 +1008,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles(); myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions(); awaitAllBulkJobCompletions();
IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
//Group-style //Group-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP); bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
@ -1030,7 +1017,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles(); myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions(); awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
//System-style //System-style
bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM); bulkDataExportOptions.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
@ -1038,7 +1025,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
myBulkDataExportSvc.buildExportFiles(); myBulkDataExportSvc.buildExportFiles();
awaitAllBulkJobCompletions(); awaitAllBulkJobCompletions();
jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId()); jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
assertThat(jobInfo.getStatus(), is(equalTo(BulkJobStatusEnum.COMPLETE))); assertThat(jobInfo.getStatus(), is(equalTo(BulkExportJobStatusEnum.COMPLETE)));
} }
@Test @Test
@ -1077,14 +1064,6 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
} }
private void awaitJobCompletion(JobExecution theJobExecution) {
await().atMost(120, TimeUnit.SECONDS).until(() -> {
JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
ourLog.info("JobExecution {} currently has status: {}", theJobExecution.getId(), jobExecution.getStatus());
return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
});
}
private void createResources() { private void createResources() {
Group group = new Group(); Group group = new Group();
group.setId("G0"); group.setId("G0");
@ -1109,7 +1088,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
linkToGoldenResource(goldenPid, sourcePid); linkToGoldenResource(goldenPid, sourcePid);
//Only add half the patients to the group. //Only add half the patients to the group.
if (i % 2 == 0 ) { if (i % 2 == 0) {
group.addMember().setEntity(new Reference(patId)); group.addMember().setEntity(new Reference(patId));
} }
@ -1153,8 +1132,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
patient.setGender(i % 2 == 0 ? Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE); patient.setGender(i % 2 == 0 ? Enumerations.AdministrativeGender.MALE : Enumerations.AdministrativeGender.FEMALE);
patient.addName().setFamily("FAM" + i); patient.addName().setFamily("FAM" + i);
patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i); patient.addIdentifier().setSystem("http://mrns").setValue("PAT" + i);
DaoMethodOutcome patientOutcome = myPatientDao.update(patient); return myPatientDao.update(patient);
return patientOutcome;
} }
private void createCareTeamWithIndex(int i, IIdType patId) { private void createCareTeamWithIndex(int i, IIdType patId) {
@ -1167,7 +1145,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
private void createImmunizationWithIndex(int i, IIdType patId) { private void createImmunizationWithIndex(int i, IIdType patId) {
Immunization immunization = new Immunization(); Immunization immunization = new Immunization();
immunization.setId("IMM" + i); immunization.setId("IMM" + i);
if (patId != null ) { if (patId != null) {
immunization.setPatient(new Reference(patId)); immunization.setPatient(new Reference(patId));
} }
if (i % 2 == 0) { if (i % 2 == 0) {

View File

@ -0,0 +1,155 @@
package ca.uhn.fhir.jpa.bulk.imprt.svc;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.bulk.BaseBatchJobR4Test;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.batch.core.JobExecution;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class BulkDataImportR4Test extends BaseBatchJobR4Test implements ITestDataBuilder {
@Autowired
private IBulkDataImportSvc mySvc;
@Autowired
private IBulkImportJobDao myBulkImportJobDao;
@Autowired
private IBulkImportJobFileDao myBulkImportJobFileDao;
@AfterEach
public void after() {
myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof IAnonymousInterceptor);
}
@Test
public void testFlow_TransactionRows() {
int transactionsPerFile = 10;
int fileCount = 10;
List<BulkImportJobFileJson> files = createInputFiles(transactionsPerFile, fileCount);
BulkImportJobJson job = new BulkImportJobJson();
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
job.setJobDescription("This is the description");
job.setBatchSize(3);
String jobId = mySvc.createNewJob(job, files);
mySvc.markJobAsReadyForActivation(jobId);
boolean activateJobOutcome = mySvc.activateNextReadyJob();
assertTrue(activateJobOutcome);
List<JobExecution> executions = awaitAllBulkJobCompletions();
assertEquals(1, executions.size());
assertEquals("This is the description", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION));
runInTransaction(() -> {
List<BulkImportJobEntity> jobs = myBulkImportJobDao.findAll();
assertEquals(0, jobs.size());
List<BulkImportJobFileEntity> jobFiles = myBulkImportJobFileDao.findAll();
assertEquals(0, jobFiles.size());
});
IBundleProvider searchResults = myPatientDao.search(SearchParameterMap.newSynchronous());
assertEquals(transactionsPerFile * fileCount, searchResults.sizeOrThrowNpe());
}
@Test
public void testFlow_WithTenantNamesInInput() {
int transactionsPerFile = 5;
int fileCount = 10;
List<BulkImportJobFileJson> files = createInputFiles(transactionsPerFile, fileCount);
for (int i = 0; i < fileCount; i++) {
files.get(i).setTenantName("TENANT" + i);
}
IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class);
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, interceptor);
BulkImportJobJson job = new BulkImportJobJson();
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
job.setBatchSize(5);
String jobId = mySvc.createNewJob(job, files);
mySvc.markJobAsReadyForActivation(jobId);
boolean activateJobOutcome = mySvc.activateNextReadyJob();
assertTrue(activateJobOutcome);
awaitAllBulkJobCompletions();
ArgumentCaptor<HookParams> paramsCaptor = ArgumentCaptor.forClass(HookParams.class);
verify(interceptor, times(50)).invoke(any(), paramsCaptor.capture());
List<String> tenantNames = paramsCaptor
.getAllValues()
.stream()
.map(t -> t.get(RequestDetails.class).getTenantId())
.distinct()
.sorted()
.collect(Collectors.toList());
assertThat(tenantNames, containsInAnyOrder(
"TENANT0", "TENANT1", "TENANT2", "TENANT3", "TENANT4", "TENANT5", "TENANT6", "TENANT7", "TENANT8", "TENANT9"
));
}
@Nonnull
private List<BulkImportJobFileJson> createInputFiles(int transactionsPerFile, int fileCount) {
List<BulkImportJobFileJson> files = new ArrayList<>();
for (int fileIndex = 0; fileIndex < fileCount; fileIndex++) {
StringBuilder fileContents = new StringBuilder();
for (int transactionIdx = 0; transactionIdx < transactionsPerFile; transactionIdx++) {
BundleBuilder bundleBuilder = new BundleBuilder(myFhirCtx);
IBaseResource patient = buildPatient(withFamily("FAM " + fileIndex + " " + transactionIdx));
bundleBuilder.addTransactionCreateEntry(patient);
fileContents.append(myFhirCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(bundleBuilder.getBundle()));
fileContents.append("\n");
}
BulkImportJobFileJson nextFile = new BulkImportJobFileJson();
nextFile.setContents(fileContents.toString());
files.add(nextFile);
}
return files;
}
protected List<JobExecution> awaitAllBulkJobCompletions() {
return awaitAllBulkJobCompletions(BatchJobsConfig.BULK_IMPORT_JOB_NAME);
}
}
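
Taken together, the flow the test above drives looks roughly like this inside a test class that has mySvc and myFhirCtx available (sketch only, assembled from the APIs exercised in this commit; somePatient stands in for any resource, for example one built with ITestDataBuilder as above):

// Sketch only.
// 1. Build an NDJSON file where each line is a FHIR transaction Bundle.
BundleBuilder bundleBuilder = new BundleBuilder(myFhirCtx);
bundleBuilder.addTransactionCreateEntry(somePatient);
String ndjsonLine = myFhirCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(bundleBuilder.getBundle());
BulkImportJobFileJson file = new BulkImportJobFileJson();
file.setContents(ndjsonLine + "\n");
// 2. Describe the job: each row is replayed as a FHIR transaction, three rows per chunk.
BulkImportJobJson job = new BulkImportJobJson();
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
job.setBatchSize(3);
// 3. Stage the job, mark it ready, and let the activator hand it to Spring Batch.
String jobId = mySvc.createNewJob(job, Collections.singletonList(file));
mySvc.markJobAsReadyForActivation(jobId);
boolean activated = mySvc.activateNextReadyJob();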

View File

@ -0,0 +1,145 @@
package ca.uhn.fhir.jpa.bulk.imprt.svc;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.blankString;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
public class BulkDataImportSvcImplTest extends BaseJpaR4Test {
@Autowired
private IBulkDataImportSvc mySvc;
@Autowired
private IBulkImportJobDao myBulkImportJobDao;
@Autowired
private IBulkImportJobFileDao myBulkImportJobFileDao;
@Test
public void testCreateNewJob() {
// Create job
BulkImportJobJson job = new BulkImportJobJson();
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
job.setBatchSize(3);
BulkImportJobFileJson file1 = new BulkImportJobFileJson();
file1.setContents("contents 1");
BulkImportJobFileJson file2 = new BulkImportJobFileJson();
file2.setContents("contents 2");
String jobId = mySvc.createNewJob(job, Lists.newArrayList(file1, file2));
assertThat(jobId, not(blankString()));
// Add file
BulkImportJobFileJson file3 = new BulkImportJobFileJson();
file3.setContents("contents 3");
mySvc.addFilesToJob(jobId, Lists.newArrayList(file3));
runInTransaction(() -> {
List<BulkImportJobEntity> jobs = myBulkImportJobDao.findAll();
assertEquals(1, jobs.size());
assertEquals(jobId, jobs.get(0).getJobId());
assertEquals(3, jobs.get(0).getFileCount());
assertEquals(BulkImportJobStatusEnum.STAGING, jobs.get(0).getStatus());
List<BulkImportJobFileEntity> files = myBulkImportJobFileDao.findAllForJob(jobId);
assertEquals(3, files.size());
});
}
@Test
public void testCreateNewJob_InvalidJob_NoContents() {
BulkImportJobJson job = new BulkImportJobJson();
job.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
job.setBatchSize(3);
BulkImportJobFileJson file1 = new BulkImportJobFileJson();
try {
mySvc.createNewJob(job, Lists.newArrayList(file1));
fail();
} catch (UnprocessableEntityException e) {
assertEquals("Job File Contents mode must not be null", e.getMessage());
}
}
@Test
public void testCreateNewJob_InvalidJob_NoProcessingMode() {
BulkImportJobJson job = new BulkImportJobJson();
job.setBatchSize(3);
BulkImportJobFileJson file1 = new BulkImportJobFileJson();
file1.setContents("contents 1");
try {
mySvc.createNewJob(job, Lists.newArrayList(file1));
fail();
} catch (UnprocessableEntityException e) {
assertEquals("Job File Processing mode must not be null", e.getMessage());
}
}
@Test
public void testAddFilesToJob_InvalidId() {
BulkImportJobFileJson file3 = new BulkImportJobFileJson();
file3.setContents("contents 3");
try {
mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
fail();
} catch (InvalidRequestException e) {
assertEquals("Unknown job ID: ABCDEFG", e.getMessage());
}
}
@Test
public void testAddFilesToJob_WrongStatus() {
runInTransaction(() -> {
BulkImportJobEntity entity = new BulkImportJobEntity();
entity.setFileCount(1);
entity.setJobId("ABCDEFG");
entity.setStatus(BulkImportJobStatusEnum.RUNNING);
entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
myBulkImportJobDao.save(entity);
});
BulkImportJobFileJson file3 = new BulkImportJobFileJson();
file3.setContents("contents 3");
try {
mySvc.addFilesToJob("ABCDEFG", Lists.newArrayList(file3));
fail();
} catch (InvalidRequestException e) {
assertEquals("Job ABCDEFG has status RUNNING and can not be added to", e.getMessage());
}
}
@Test
public void testActivateJob() {
runInTransaction(() -> {
BulkImportJobEntity entity = new BulkImportJobEntity();
entity.setFileCount(1);
entity.setJobId("ABCDEFG");
entity.setStatus(BulkImportJobStatusEnum.STAGING);
entity.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION);
myBulkImportJobDao.save(entity);
});
mySvc.markJobAsReadyForActivation("ABCDEFG");
runInTransaction(() -> {
List<BulkImportJobEntity> jobs = myBulkImportJobDao.findAll();
assertEquals(1, jobs.size());
assertEquals(BulkImportJobStatusEnum.READY, jobs.get(0).getStatus());
});
}
}
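
The service-level lifecycle those tests cover, condensed into one sketch (not from the commit; job, file1, file2 and file3 are built as in the tests above):

// Sketch only.
String jobId = mySvc.createNewJob(job, Lists.newArrayList(file1, file2)); // persisted with status STAGING
mySvc.addFilesToJob(jobId, Lists.newArrayList(file3));                    // rejected once the job is e.g. RUNNING (see testAddFilesToJob_WrongStatus)
mySvc.markJobAsReadyForActivation(jobId);                                 // status moves to READY
mySvc.activateNextReadyJob();                                             // a READY job is picked up and handed to the batch framework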

View File

@ -9,7 +9,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions; import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.BaseConfig; import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConcept;

View File

@ -85,7 +85,7 @@ public class TransactionProcessorTest {
.setUrl("/MedicationKnowledge"); .setUrl("/MedicationKnowledge");
try { try {
myTransactionProcessor.transaction(null, input); myTransactionProcessor.transaction(null, input, false);
fail(); fail();
} catch (InvalidRequestException e) { } catch (InvalidRequestException e) {
assertEquals("Resource MedicationKnowledge is not supported on this server. Supported resource types: []", e.getMessage()); assertEquals("Resource MedicationKnowledge is not supported on this server. Supported resource types: []", e.getMessage());

View File

@ -8,7 +8,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSubscription;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc; import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.TestDstu2Config; import ca.uhn.fhir.jpa.config.TestDstu2Config;
import ca.uhn.fhir.jpa.dao.BaseJpaTest; import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;

View File

@@ -785,7 +785,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
 //		try {
    Bundle resp = mySystemDao.transaction(mySrd, request);
    assertEquals(1, resp.getEntry().size());
-   assertEquals("404 Not Found", resp.getEntry().get(0).getResponse().getStatus());
+   assertEquals("204 No Content", resp.getEntry().get(0).getResponse().getStatus());
 //		fail();
 //		} catch (ResourceNotFoundException e) {
@@ -1159,11 +1159,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
    }
    assertEquals("201 Created", resp.getEntry().get(2).getResponse().getStatus());
    assertThat(resp.getEntry().get(2).getResponse().getLocation(), startsWith("Patient/"));
-   if (pass == 0) {
-      assertEquals("404 Not Found", resp.getEntry().get(3).getResponse().getStatus());
-   } else {
    assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus());
-   }
    Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource();
    assertEquals(1, respGetBundle.getEntry().size());
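
The two hunks above change the expected outcome when a transaction bundle deletes a resource that does not exist: the entry now reports "204 No Content" instead of "404 Not Found". The test shown is the DSTU2 one; purely as an illustration, and assuming the same semantics apply to the R4 code paths touched elsewhere in this commit, a transaction bundle that triggers this case can be built with the R4 model classes like so:

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;

public class TransactionDeleteExample {

   // Builds a transaction bundle whose only entry deletes a patient that may
   // not exist on the server. Per the updated assertions above, processing
   // such a bundle now yields "204 No Content" for the entry rather than
   // failing with "404 Not Found".
   public static Bundle buildDeleteTransaction() {
      Bundle request = new Bundle();
      request.setType(BundleType.TRANSACTION);
      request.addEntry()
         .getRequest()
         .setMethod(HTTPVerb.DELETE)
         .setUrl("Patient/does-not-exist");
      return request;
   }
}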

View File

@@ -13,7 +13,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestDstu3Config;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;

View File

@@ -17,7 +17,7 @@ import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4Config;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
@@ -51,24 +51,19 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
 import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
 import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
 import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
-import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
 import ca.uhn.fhir.jpa.entity.TermCodeSystem;
 import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
 import ca.uhn.fhir.jpa.entity.TermConcept;
 import ca.uhn.fhir.jpa.entity.TermValueSet;
 import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
-import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
 import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
-import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.packages.IPackageInstallerSvc;
 import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
-import ca.uhn.fhir.jpa.provider.r4.BaseJpaResourceProviderObservationR4;
 import ca.uhn.fhir.jpa.provider.r4.JpaSystemProviderR4;
-import ca.uhn.fhir.jpa.rp.r4.ObservationResourceProvider;
 import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
 import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
 import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
@@ -77,7 +72,6 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
 import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
 import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
 import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
-import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
 import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
 import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
 import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
@@ -95,11 +89,9 @@ import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
 import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
 import ca.uhn.fhir.test.utilities.ITestDataBuilder;
 import ca.uhn.fhir.util.ClasspathUtil;
-import ca.uhn.fhir.util.ResourceUtil;
 import ca.uhn.fhir.util.UrlUtil;
 import ca.uhn.fhir.validation.FhirValidator;
 import ca.uhn.fhir.validation.ValidationResult;
-import org.apache.commons.io.IOUtils;
 import org.hibernate.search.mapper.orm.Search;
 import org.hibernate.search.mapper.orm.session.SearchSession;
 import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
@@ -168,7 +160,6 @@ import org.hl7.fhir.r4.model.ValueSet;
 import org.hl7.fhir.r5.utils.IResourceValidator;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -182,7 +173,6 @@ import org.springframework.transaction.PlatformTransactionManager;
 import javax.persistence.EntityManager;
 import java.io.IOException;
-import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

View File

@@ -39,7 +39,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

View File

@@ -7,7 +7,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;

View File

@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

View File

@@ -18,6 +18,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
 import ca.uhn.fhir.rest.param.ReferenceParam;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
@@ -60,10 +61,10 @@ import org.hl7.fhir.r4.model.Quantity;
 import org.hl7.fhir.r4.model.Reference;
 import org.hl7.fhir.r4.model.Resource;
 import org.hl7.fhir.r4.model.ValueSet;
-import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 import org.springframework.transaction.TransactionDefinition;
 import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.support.TransactionCallback;
@@ -568,9 +569,9 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    * so it indexes the newest resource one more time. It wouldn't be a big deal
    * if this ever got fixed so that it ends up with 2 instead of 3.
    */
-   runInTransaction(()->{
+   runInTransaction(() -> {
      Optional<Integer> reindexCount = myResourceReindexJobDao.getReindexCount(jobId);
-     assertEquals(3, reindexCount.orElseThrow(()->new NullPointerException("No job " + jobId)).intValue());
+     assertEquals(3, reindexCount.orElseThrow(() -> new NullPointerException("No job " + jobId)).intValue());
    });
    // Try making the resource unparseable
@@ -626,7 +627,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    searchParamMap.add(Patient.SP_FAMILY, new StringParam("family2"));
    assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
-   runInTransaction(()->{
+   runInTransaction(() -> {
      ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3);
      assertNotNull(historyEntry);
      myResourceHistoryTableDao.delete(historyEntry);
@@ -656,7 +657,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1"));
    assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
-   runInTransaction(()->{
+   runInTransaction(() -> {
      myEntityManager
        .createQuery("UPDATE ResourceIndexedSearchParamString s SET s.myHashNormalizedPrefix = 0")
        .executeUpdate();
@@ -671,7 +672,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    myResourceReindexingSvc.markAllResourcesForReindexing();
    myResourceReindexingSvc.forceReindexingPass();
-   runInTransaction(()->{
+   runInTransaction(() -> {
      ResourceIndexedSearchParamString param = myResourceIndexedSearchParamStringDao.findAll()
        .stream()
        .filter(t -> t.getParamName().equals("family"))
@@ -694,7 +695,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    searchParamMap.add(Patient.SP_FAMILY, new StringParam("family1"));
    assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
-   runInTransaction(()->{
+   runInTransaction(() -> {
      Long i = myEntityManager
        .createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class)
        .getSingleResult();
@@ -714,7 +715,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    myResourceReindexingSvc.markAllResourcesForReindexing();
    myResourceReindexingSvc.forceReindexingPass();
-   runInTransaction(()->{
+   runInTransaction(() -> {
      Long i = myEntityManager
        .createQuery("SELECT count(s) FROM ResourceIndexedSearchParamString s WHERE s.myHashIdentity = 0", Long.class)
        .getSingleResult();
@@ -808,6 +809,30 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    assertEquals("201 Created", resp.getEntry().get(0).getResponse().getStatus());
  }
+  @Test
+  public void testNestedTransaction_ReadsBlocked() {
+    String methodName = "testTransactionBatchWithFailingRead";
+    Bundle request = new Bundle();
+    request.setType(BundleType.TRANSACTION);
+    Patient p = new Patient();
+    p.addName().setFamily(methodName);
+    request.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST);
+    request.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient?identifier=foo");
+    try {
+      runInTransaction(()->{
+        mySystemDao.transactionNested(mySrd, request);
+      });
+      fail();
+    } catch (InvalidRequestException e) {
+      assertEquals("Can not invoke read operation on nested transaction", e.getMessage());
+    }
+  }
  @Test
  public void testTransactionBatchWithFailingRead() {
    String methodName = "testTransactionBatchWithFailingRead";
@@ -923,8 +948,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    Bundle outcome = mySystemDao.transaction(mySrd, request);
    ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus());
-   assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
-   assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
+   assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
+   assertEquals("Missing required resource in Bundle.entry[0].resource for operation POST", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
    validate(outcome);
  }
@@ -942,8 +967,8 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    Bundle outcome = mySystemDao.transaction(mySrd, request);
    ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
    assertEquals("400 Bad Request", outcome.getEntry().get(0).getResponse().getStatus());
-   assertEquals(IssueSeverity.ERROR, ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
-   assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome)outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
+   assertEquals(IssueSeverity.ERROR, ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getSeverity());
+   assertEquals("Missing required resource in Bundle.entry[0].resource for operation PUT", ((OperationOutcome) outcome.getEntry().get(0).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics());
    validate(outcome);
  }
@@ -2272,7 +2297,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
    patient2.addIdentifier().setSystem("urn:system").setValue("testPersistWithSimpleLinkP02");
    request.addEntry().setResource(patient2).getRequest().setMethod(HTTPVerb.POST);
-   assertThrows(InvalidRequestException.class, ()->{
+   assertThrows(InvalidRequestException.class, () -> {
      mySystemDao.transaction(mySrd, request);
    });
  }
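
The new testNestedTransaction_ReadsBlocked() above asserts that the newly added transactionNested() entry point refuses GET entries with the message "Can not invoke read operation on nested transaction". The standalone sketch below mirrors that guard over an R4 bundle; it substitutes IllegalArgumentException for HAPI's InvalidRequestException so it compiles without server dependencies, and it is an illustration of the rule rather than the committed implementation.

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;

public class NestedTransactionReadGuard {

   // Rejects any read (GET) entry when a transaction bundle is processed in
   // nested mode, mirroring the behaviour asserted by
   // testNestedTransaction_ReadsBlocked(). Illustration only.
   public static void validateForNestedProcessing(Bundle theRequest) {
      for (BundleEntryComponent entry : theRequest.getEntry()) {
         if (entry.getRequest().getMethod() == HTTPVerb.GET) {
            throw new IllegalArgumentException("Can not invoke read operation on nested transaction");
         }
      }
   }
}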

View File

@@ -16,7 +16,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
 import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.TestR5Config;
 import ca.uhn.fhir.jpa.dao.BaseJpaTest;
 import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;

Some files were not shown because too many files have changed in this diff.