Mirror of https://github.com/hapifhir/hapi-fhir.git, synced 2025-03-28 10:58:47 +00:00

Commit b7166735f9: Merge remote-tracking branch 'origin/master' into issue-2534-new-tx-pointcut
.editorconfig
hapi-deployable-pom
hapi-fhir-android
hapi-fhir-base
hapi-fhir-bom
hapi-fhir-cli
hapi-fhir-client-okhttp
hapi-fhir-client
hapi-fhir-converter
hapi-fhir-dist
hapi-fhir-docs
pom.xml
src/main/resources/ca/uhn/hapi/fhir
changelog/5_4_0
2533-fix-issue-with-reference-resources-not-being-returned-in-search-queries.yaml
2535-do-not-create-snapshot-for-logical-structuredefinition.yaml
2543-fix-issue-where-versionned-references-are-not-being-returned-properly.yaml
2547-mdm-add-numeric-matcher.yaml
2556-prevent-bulk-failure-while-partitioned.yaml
changes.yaml
docs/server_jpa_mdm
hapi-fhir-jacoco
hapi-fhir-jaxrsserver-base
hapi-fhir-jaxrsserver-example
hapi-fhir-jpaserver-api
hapi-fhir-jpaserver-base
pom.xml
src
main/java/ca/uhn/fhir/jpa
batch
bulk
export
api
job
BaseBulkItemReader.java
BulkExportCreateEntityStepListener.java
BulkExportGenerateResourceFilesStepListener.java
BulkExportJobCloser.java
BulkExportJobConfig.java
BulkExportJobParameterValidator.java
BulkExportJobParametersBuilder.java
BulkItemReader.java
CreateBulkExportEntityTasklet.java
GroupBulkExportJobParametersBuilder.java
GroupBulkItemReader.java
GroupIdPresentValidator.java
PatientBulkItemReader.java
ResourceToFileWriter.java
ResourceTypePartitioner.java
model
provider
svc
imprt
api
job
ActivateBulkImportEntityStepListener.java
BulkImportFileReader.java
BulkImportFileWriter.java
BulkImportJobCloser.java
BulkImportJobConfig.java
BulkImportJobParameterValidator.java
BulkImportPartitioner.java
BulkImportProcessStepCompletionPolicy.java
BulkImportStepListener.java
CreateBulkImportEntityTasklet.java
model
BulkImportJobFileJson.java
BulkImportJobJson.java
BulkImportJobStatusEnum.java
JobFileRowProcessingModeEnum.java
ParsedBulkImportRecord.java
svc
config
dao
BaseHapiFhirSystemDao.java
BaseTransactionProcessor.java
FhirSystemDaoDstu2.java
TransactionProcessorVersionAdapterDstu2.java
data
dstu3
expunge
r4
r5
entity
packages
partition
test/java/ca/uhn/fhir/jpa/bulk
@@ -31,6 +31,7 @@ charset = utf-8
indent_style = tab
tab_width = 3
indent_size = 3
+continuation_indent_size=3
ij_java_align_consecutive_assignments = false
ij_java_align_consecutive_variable_declarations = false
ij_java_align_group_field_declarations = false

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -0,0 +1,38 @@
package ca.uhn.fhir.context.phonetic;

/*-
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.google.common.base.CharMatcher;

// Useful for numerical identifiers like phone numbers, address parts etc.
// This should not be used where decimals are important. A new "quantity encoder" should be added to handle cases like that.
public class NumericEncoder implements IPhoneticEncoder {
   @Override
   public String name() {
      return "NUMERIC";
   }

   @Override
   public String encode(String theString) {
      // Remove everything but the numbers
      return CharMatcher.inRange('0', '9').retainFrom(theString);
   }
}
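As a quick illustration of the new encoder's behaviour, the sketch below exercises it directly; the demo class itself is not part of the commit:

import ca.uhn.fhir.context.phonetic.NumericEncoder;

public class NumericEncoderDemo {
   public static void main(String[] args) {
      NumericEncoder encoder = new NumericEncoder();
      // Both renderings of the same phone number normalize to "4169671111",
      // so they compare as equal after encoding
      System.out.println(encoder.encode("(416) 967-1111"));
      System.out.println(encoder.encode("416 967 1111"));
   }
}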
@@ -39,7 +39,8 @@ public enum PhoneticEncoderEnum {
METAPHONE(new ApacheEncoder("METAPHONE", new Metaphone())),
NYSIIS(new ApacheEncoder("NYSIIS", new Nysiis())),
REFINED_SOUNDEX(new ApacheEncoder("REFINED_SOUNDEX", new RefinedSoundex())),
-SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex()));
+SOUNDEX(new ApacheEncoder("SOUNDEX", new Soundex())),
+NUMERIC(new NumericEncoder());

private final IPhoneticEncoder myPhoneticEncoder;
@@ -156,7 +156,8 @@ public class BundleBuilder {

// Bundle.entry.request.url
IPrimitiveType<?> url = (IPrimitiveType<?>) myContext.getElementDefinition("uri").newInstance();
-url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().getValue());
+String resourceType = myContext.getResourceType(theResource);
+url.setValueAsString(theResource.getIdElement().toUnqualifiedVersionless().withResourceType(resourceType).getValue());
myEntryRequestUrlChild.getMutator().setValue(request, url);

// Bundle.entry.request.url
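The practical effect of this fix appears to be that a resource ID lacking its type prefix now yields a fully qualified Bundle.entry.request.url. A minimal sketch of the assumed before/after behaviour (the Patient ID is illustrative):

import org.hl7.fhir.r4.model.IdType;

public class RequestUrlDemo {
   public static void main(String[] args) {
      // An ID captured without its resource-type prefix
      IdType id = new IdType("123");
      // Old behaviour: the request URL would be rendered as just "123"
      System.out.println(id.toUnqualifiedVersionless().getValue());
      // New behaviour: qualified with the resource type, yielding "Patient/123"
      System.out.println(id.toUnqualifiedVersionless().withResourceType("Patient").getValue());
   }
}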
@@ -68,7 +68,9 @@ public enum VersionEnum {
V5_2_0,
V5_2_1,
V5_3_0,
-V5_4_0;
+V5_3_2,
+V5_4_0,
+;

public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values();

@@ -68,8 +68,8 @@ ca.uhn.fhir.validation.ValidationResult.noIssuesDetected=No issues detected duri

# JPA Messages

-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
-ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary resources may not be exported with bulk export
+ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.
@@ -1,14 +1,14 @@
package ca.uhn.fhir.context.phonetic;

-import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.startsWith;
+import static org.junit.jupiter.api.Assertions.assertEquals;

class PhoneticEncoderTest {
private static final Logger ourLog = LoggerFactory.getLogger(PhoneticEncoderTest.class);

@@ -23,7 +23,11 @@ class PhoneticEncoderTest {
public void testEncodeAddress(PhoneticEncoderEnum thePhoneticEncoderEnum) {
String encoded = thePhoneticEncoderEnum.getPhoneticEncoder().encode(ADDRESS_LINE);
ourLog.info("{}: {}", thePhoneticEncoderEnum.name(), encoded);
-assertThat(encoded, startsWith(NUMBER + " "));
-assertThat(encoded, endsWith(" " + SUITE));
+if (thePhoneticEncoderEnum == PhoneticEncoderEnum.NUMERIC) {
+assertEquals(NUMBER + SUITE, encoded);
+} else {
+assertThat(encoded, startsWith(NUMBER + " "));
+assertThat(encoded, endsWith(" " + SUITE));
+}
}
}
@@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -78,13 +78,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu2</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-subscription</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>

@@ -101,7 +101,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-testpage-overlay</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<classifier>classes</classifier>
</dependency>
<dependency>
@@ -0,0 +1,7 @@
---
type: fix
issue: 2533
title: "When issuing a request for a specific Resource and also specifying an _include param,
  the referenced resource is not returned when there is only one version of the referenced resource available.
  When more than one version is available, the referenced resource is returned in the response bundle."
backport: 5.3.2

@@ -0,0 +1,6 @@
---
type: fix
issue: 2535
title: "An issue with the package installer involving logical StructureDefinition resources was fixed. The package registry will no
  longer attempt to generate a snapshot for logical StructureDefinition resources if one is not already provided in the
  resource definition."

@@ -0,0 +1,9 @@
---
type: fix
issue: 2543
title: "When issuing a request for a specific Resource and also specifying an _include param,
  the proper historical referenced resource is not returned when more than one version of the
  referenced resource is available, after the reference has been changed from the original version 1 to some other version.
  When more than one version is available, and the referring resource had previously referred to version 1
  but now refers to version 4, the resource returned in the response bundle is for version 1."
backport: 5.3.2
5  hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2547-mdm-add-numeric-matcher.yaml  Normal file

@@ -0,0 +1,5 @@
---
type: add
issue: 2547
title: "Added a new NUMERIC mdm matcher for matching phone numbers. Also added a NUMERIC phonetic encoder to support
  adding a NUMERIC-encoded search parameter (e.g. if searching for matching phone numbers is required by mdm candidate searching)."

4  hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_4_0/2556-prevent-bulk-failure-while-partitioned.yaml  Normal file

@@ -0,0 +1,4 @@
---
type: fix
issue: 2556
title: "Fixed a bug which would cause Bulk Export to fail when run in a partitioned environment."
@@ -0,0 +1,26 @@
---
- item:
  type: "add"
  title: "The versions of a few dependencies have been bumped to the latest versions
    (dependent HAPI modules listed in brackets):
    <ul>
    <li>Commons-Lang3 (Core): 3.9 -> 3.12.0</li>
    <li>Commons-Text (Core): 1.7 -> 1.9</li>
    <li>Commons-Codec (Core): 1.14 -> 1.15</li>
    <li>Commons-IO (Core): 2.6 -> 2.8.0</li>
    <li>Guava (Core): 30.1-jre -> 30.1.1-jre</li>
    <li>Jackson (Core): 2.12.1 -> 2.12.3</li>
    <li>Woodstox (Core): 6.2.3 -> 6.2.5</li>
    <li>Gson (JPA): 2.8.5 -> 2.8.6</li>
    <li>Caffeine (JPA): 2.7.0 -> 3.0.1</li>
    <li>Hibernate (JPA): 5.4.26.Final -> 5.4.30.Final</li>
    <li>Hibernate Search (JPA): 6.0.0.Final -> 6.0.2.Final</li>
    <li>Spring (JPA): 5.3.3 -> 5.3.6</li>
    <li>Spring Batch (JPA): 4.2.3.RELEASE -> 4.3.2</li>
    <li>Spring Data (JPA): 2.4.2 -> 2.4.7</li>
    <li>Commons DBCP2 (JPA): 2.7.0 -> 2.8.0</li>
    <li>Thymeleaf (Testpage Overlay): 3.0.11.RELEASE -> 3.0.12.RELEASE</li>
    <li>JAnsi (CLI): 2.1.1 -> 2.3.2</li>
    <li>JArchivelib (CLI): 1.0.0 -> 1.1.0</li>
    </ul>
    "
@@ -292,10 +292,10 @@ The following algorithms are currently supported:
<td>Gail = Gael, Gail != Gale, Thomas != Tom</td>
</tr>
<tr>
-<td>CAVERPHONE1</td>
+<td>CAVERPHONE2</td>
<td>matcher</td>
<td>
-<a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone1.html">Apache Caverphone1</a>
+<a href="https://commons.apache.org/proper/commons-codec/apidocs/org/apache/commons/codec/language/Caverphone2.html">Apache Caverphone2</a>
</td>
<td>Gail = Gael, Gail = Gale, Thomas != Tom</td>
</tr>

@@ -379,6 +379,14 @@ The following algorithms are currently supported:
</td>
<td>2019-12,Month = 2019-12-19,Day</td>
</tr>
+<tr>
+<td>NUMERIC</td>
+<td>matcher</td>
+<td>
+Remove all non-numeric characters from the string before comparing.
+</td>
+<td>4169671111 = (416) 967-1111</td>
+</tr>
<tr>
<td>NAME_ANY_ORDER</td>
<td>matcher</td>
@@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -67,6 +67,18 @@ public interface IFhirSystemDao<T, MT> extends IDao {
 */
IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage);

+/**
+ * Executes a FHIR transaction using a new database transaction. This method must
+ * not be called from within a DB transaction.
+ */
T transaction(RequestDetails theRequestDetails, T theResources);

+/**
+ * Executes a FHIR transaction nested inside the current database transaction.
+ * This form of the transaction processor can handle write operations only (no reads)
+ */
+default T transactionNested(RequestDetails theRequestDetails, T theResources) {
+throw new UnsupportedOperationException();
+}

}
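Read together, the two methods split the contract cleanly: transaction() opens its own database transaction, while transactionNested() joins the caller's and is limited to writes. A hedged usage sketch; the DAO wiring and the R4 types here are assumptions for illustration:

import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta;

public class TransactionSketch {
   // Assumed to be injected by the surrounding application context
   private IFhirSystemDao<Bundle, Meta> mySystemDao;

   public Bundle submit(RequestDetails theRequestDetails, Bundle theInput) {
      // Called outside any DB transaction: the DAO starts a new one
      return mySystemDao.transaction(theRequestDetails, theInput);
   }

   public Bundle submitNested(RequestDetails theRequestDetails, Bundle theWritesOnly) {
      // Called inside an existing DB transaction: per the javadoc, writes only
      return mySystemDao.transactionNested(theRequestDetails, theWritesOnly);
   }
}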
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
-<version>5.4.0-PRE5-SNAPSHOT</version>
+<version>5.4.0-PRE6-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -20,17 +20,20 @@ package ca.uhn.fhir.jpa.batch;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

@Configuration
//When you define a new batch job, add it here.
@Import({
-CommonBatchJobConfig.class,
-BulkExportJobConfig.class
+CommonBatchJobConfig.class,
+BulkExportJobConfig.class,
+BulkImportJobConfig.class
})
public class BatchJobsConfig {
+public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
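Per the inline comment, the @Import list above is the registration point for new batch jobs. A sketch of what a future registration would look like; MyNewJobConfig is hypothetical, and the job config classes are assumed to be co-located or imported:

import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

@Configuration
@Import({
   CommonBatchJobConfig.class,
   BulkExportJobConfig.class,
   BulkImportJobConfig.class,
   MyNewJobConfig.class // hypothetical: each new batch job config is appended here
})
public class BatchJobsConfig {
   // job name constants as above
}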
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.util.ExtensionUtil;
import ca.uhn.fhir.util.HapiExtensions;
@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.api;
+package ca.uhn.fhir.jpa.bulk.export.api;

/*-
 * #%L

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.api;
+package ca.uhn.fhir.jpa.bulk.export.api;

/*-
 * #%L

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.api;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.hl7.fhir.instance.model.api.IIdType;

import javax.transaction.Transactional;

@@ -50,7 +50,7 @@ public interface IBulkDataExportSvc {

class JobInfo {
private String myJobId;
-private BulkJobStatusEnum myStatus;
+private BulkExportJobStatusEnum myStatus;
private List<FileEntry> myFiles;
private String myRequest;
private Date myStatusTime;

@@ -90,11 +90,11 @@ public interface IBulkDataExportSvc {

}

-public BulkJobStatusEnum getStatus() {
+public BulkExportJobStatusEnum getStatus() {
return myStatus;
}

-public JobInfo setStatus(BulkJobStatusEnum theStatus) {
+public JobInfo setStatus(BulkExportJobStatusEnum theStatus) {
myStatus = theStatus;
return this;
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

@@ -102,7 +101,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePers
myPidIterator = getResourcePidIterator();
}

-abstract Iterator<ResourcePersistentId> getResourcePidIterator();
+protected abstract Iterator<ResourcePersistentId> getResourcePidIterator();

protected List<SearchParameterMap> createSearchParameterMapsForResourceType() {
BulkExportJobEntity jobEntity = getJobEntity();

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,16 +20,12 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
-import org.springframework.batch.core.BatchStatus;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Will run before and after a job to set the status to whatever is appropriate.

@@ -43,7 +39,7 @@ public class BulkExportCreateEntityStepListener implements StepExecutionListener
public void beforeStep(StepExecution theStepExecution) {
String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
if (jobUuid != null) {
-myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.BUILDING);
+myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.BUILDING);
}
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;

@@ -55,7 +55,7 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutio
}
assert isNotBlank(jobUuid);
String exitDescription = theStepExecution.getExitStatus().getExitDescription();
-myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription);
+myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkExportJobStatusEnum.ERROR, exitDescription);
}
return theStepExecution.getExitStatus();
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;

@@ -44,9 +44,9 @@ public class BulkExportJobCloser implements Tasklet {
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
if (theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus() == BatchStatus.STARTED) {
-myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.COMPLETE);
+myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.COMPLETE);
} else {
-myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkJobStatusEnum.ERROR);
+myBulkExportDaoSvc.setJobToStatus(myJobUUID, BulkExportJobStatusEnum.ERROR);
}
return RepeatStatus.FINISHED;
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.processors.GoldenResourceAnnotatingProcessor;
import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -35,8 +35,6 @@ import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;

@@ -59,6 +57,7 @@ public class BulkExportJobConfig {
public static final String GROUP_ID_PARAMETER = "groupId";
public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
public static final int CHUNK_SIZE = 100;
+public static final String JOB_DESCRIPTION = "jobDescription";

@Autowired
private StepBuilderFactory myStepBuilderFactory;

@@ -90,9 +89,9 @@ public class BulkExportJobConfig {
@Lazy
public Job bulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
-.validator(bulkJobParameterValidator())
+.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
-.next(partitionStep())
+.next(bulkExportPartitionStep())
.next(closeJobStep())
.build();
}

@@ -114,7 +113,7 @@ public class BulkExportJobConfig {
public Job groupBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
.validator(groupBulkJobParameterValidator())
-.validator(bulkJobParameterValidator())
+.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(groupPartitionStep())
.next(closeJobStep())

@@ -125,7 +124,7 @@ public class BulkExportJobConfig {
@Lazy
public Job patientBulkExportJob() {
return myJobBuilderFactory.get(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
-.validator(bulkJobParameterValidator())
+.validator(bulkExportJobParameterValidator())
.start(createBulkExportEntityStep())
.next(patientPartitionStep())
.next(closeJobStep())

@@ -150,8 +149,9 @@ public class BulkExportJobConfig {
return new CreateBulkExportEntityTasklet();
}

@Bean
-public JobParametersValidator bulkJobParameterValidator() {
+public JobParametersValidator bulkExportJobParameterValidator() {
return new BulkExportJobParameterValidator();
}

@@ -159,7 +159,7 @@ public class BulkExportJobConfig {
@Bean
public Step groupBulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get("groupBulkExportGenerateResourceFilesStep")
-.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(groupBulkItemReader())
.processor(inflateResourceThenAnnotateWithGoldenResourceProcessor())
.writer(resourceToFileWriter())

@@ -170,17 +170,18 @@ public class BulkExportJobConfig {
@Bean
public Step bulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get("bulkExportGenerateResourceFilesStep")
-.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(bulkItemReader())
.processor(myPidToIBaseResourceProcessor)
.writer(resourceToFileWriter())
.listener(bulkExportGenerateResourceFilesStepListener())
.build();
}

@Bean
public Step patientBulkExportGenerateResourceFilesStep() {
return myStepBuilderFactory.get("patientBulkExportGenerateResourceFilesStep")
-.<List<ResourcePersistentId>, List<IBaseResource>> chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
+.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(patientBulkItemReader())
.processor(myPidToIBaseResourceProcessor)
.writer(resourceToFileWriter())

@@ -214,7 +215,7 @@ public class BulkExportJobConfig {
}

@Bean
-public Step partitionStep() {
+public Step bulkExportPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
.partitioner("bulkExportGenerateResourceFilesStep", bulkExportResourceTypePartitioner())
.step(bulkExportGenerateResourceFilesStep())

@@ -240,7 +241,7 @@ public class BulkExportJobConfig {

@Bean
@StepScope
-public GroupBulkItemReader groupBulkItemReader(){
+public GroupBulkItemReader groupBulkItemReader() {
return new GroupBulkItemReader();
}

@@ -252,7 +253,7 @@ public class BulkExportJobConfig {

@Bean
@StepScope
-public BulkItemReader bulkItemReader(){
+public BulkItemReader bulkItemReader() {
return new BulkItemReader();
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.Constants;
import org.springframework.batch.core.JobParametersBuilder;

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.slf4j.Logger;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

@@ -43,7 +42,7 @@ public class BulkItemReader extends BaseBulkItemReader {
private static final Logger ourLog = Logs.getBatchTroubleshootingLog();

@Override
-Iterator<ResourcePersistentId> getResourcePidIterator() {
+protected Iterator<ResourcePersistentId> getResourcePidIterator() {
ourLog.info("Bulk export assembling export of type {} for job {}", myResourceType, myJobUUID);
Set<ResourcePersistentId> myReadPids = new HashSet<>();

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,8 +20,8 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.StringUtils;

@@ -87,7 +87,7 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
}
}

-private void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
+public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
theChunkContext
.getStepContext()
.getStepExecution()

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.dao.data.IMdmLinkDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;

@@ -36,7 +37,6 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import com.google.common.collect.Multimaps;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

@@ -81,7 +81,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
private MdmExpansionCacheSvc myMdmExpansionCacheSvc;

@Override
-Iterator<ResourcePersistentId> getResourcePidIterator() {
+protected Iterator<ResourcePersistentId> getResourcePidIterator() {
Set<ResourcePersistentId> myReadPids = new HashSet<>();

//Short circuit out if we detect we are attempting to extract patients

@@ -119,7 +119,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
Set<Long> patientPidsToExport = new HashSet<>(pidsOrThrowException);

if (myMdmEnabled) {
-IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
+SystemRequestDetails srd = SystemRequestDetails.newSystemRequestAllPartitions();
+IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), srd);
Long pidOrNull = myIdHelperService.getPidOrNull(group);
List<IMdmLinkDao.MdmPidTuple> goldenPidSourcePidTuple = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
goldenPidSourcePidTuple.forEach(tuple -> {

@@ -179,13 +180,12 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
 * @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"]
 */
private List<String> getMembers() {
-IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
+SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
+IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
List<IPrimitiveType> evaluate = myContext.newFhirPath().evaluate(group, "member.entity.reference", IPrimitiveType.class);
return evaluate.stream().map(IPrimitiveType::getValueAsString).collect(Collectors.toList());
}

/**
 * Given the local myGroupId, perform an expansion to retrieve all resource IDs of member patients.
 * if myMdmEnabled is set to true, we also reach out to the IMdmLinkDao to attempt to also expand it into matched

@@ -195,7 +195,8 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
 */
private Set<String> expandAllPatientPidsFromGroup() {
Set<String> expandedIds = new HashSet<>();
-IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId));
+SystemRequestDetails requestDetails = SystemRequestDetails.newSystemRequestAllPartitions();
+IBaseResource group = myDaoRegistry.getResourceDao("Group").read(new IdDt(myGroupId), requestDetails);
Long pidOrNull = myIdHelperService.getPidOrNull(group);

//Attempt to perform MDM Expansion of membership

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -26,7 +26,7 @@ import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;


-import static ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig.*;
+import static ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig.*;
import static org.slf4j.LoggerFactory.getLogger;

public class GroupIdPresentValidator implements JobParametersValidator {

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -61,7 +61,7 @@ public class PatientBulkItemReader extends BaseBulkItemReader implements ItemRea
}

@Override
-Iterator<ResourcePersistentId> getResourcePidIterator() {
+protected Iterator<ResourcePersistentId> getResourcePidIterator() {
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
ourLog.error(errorMessage);

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -25,8 +25,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.util.BinaryUtil;

@@ -100,7 +101,7 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
IBaseBinary binary = BinaryUtil.newBinary(myFhirContext);
binary.setContentType(Constants.CT_FHIR_NDJSON);
binary.setContent(myOutputStream.toByteArray());
-DaoMethodOutcome outcome = myBinaryDao.create(binary);
+DaoMethodOutcome outcome = myBinaryDao.create(binary, new SystemRequestDetails());
return outcome.getResource().getIdElement();
}

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.job;
+package ca.uhn.fhir.jpa.bulk.export.job;

/*-
 * #%L

@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
+import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.model;
+package ca.uhn.fhir.jpa.bulk.export.model;

/*-
 * #%L

@@ -20,7 +20,14 @@ package ca.uhn.fhir.jpa.bulk.model;
 * #L%
 */

-public enum BulkJobStatusEnum {
+import com.fasterxml.jackson.annotation.JsonFormat;

+@JsonFormat(shape = JsonFormat.Shape.STRING)
+public enum BulkExportJobStatusEnum {

/**
 * Sorting OK!
 */

SUBMITTED,
BUILDING,

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.model;
+package ca.uhn.fhir.jpa.bulk.export.model;

/*-
 * #%L

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.provider;
+package ca.uhn.fhir.jpa.bulk.export.provider;

/*-
 * #%L

@@ -21,9 +21,9 @@ package ca.uhn.fhir.jpa.bulk.provider;
 */

import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.annotation.IdParam;
import ca.uhn.fhir.rest.annotation.Operation;

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.svc;
+package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@@ -23,16 +23,15 @@ package ca.uhn.fhir.jpa.bulk.svc;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.fhirpath.IFhirPath;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.job.BulkExportJobConfig;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@@ -43,16 +42,13 @@ import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.model.primitive.IdDt;
+import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobExecutionContext;

@@ -78,9 +74,9 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.GROUP;
-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.PATIENT;
-import static ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions.ExportStyle.SYSTEM;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.GROUP;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.PATIENT;
+import static ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions.ExportStyle.SYSTEM;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParam;
import static ca.uhn.fhir.util.UrlUtil.escapeUrlParams;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -136,7 +132,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

Optional<BulkExportJobEntity> jobToProcessOpt = myTxTemplate.execute(t -> {
Pageable page = PageRequest.of(0, 1);
-Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkJobStatusEnum.SUBMITTED);
+Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByStatus(page, BulkExportJobStatusEnum.SUBMITTED);
if (submittedJobs.isEmpty()) {
return Optional.empty();
}

@@ -158,7 +154,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
Optional<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findByJobId(jobUuid);
if (submittedJobs.isPresent()) {
BulkExportJobEntity jobEntity = submittedJobs.get();
-jobEntity.setStatus(BulkJobStatusEnum.ERROR);
+jobEntity.setStatus(BulkExportJobStatusEnum.ERROR);
jobEntity.setStatusMessage(e.getMessage());
myBulkExportJobDao.save(jobEntity);
}

@@ -208,8 +204,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {

ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
-getBinaryDao().delete(toId(nextFile.getResourceId()));
-getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), null);
+getBinaryDao().delete(toId(nextFile.getResourceId()), new SystemRequestDetails());
+getBinaryDao().forceExpungeInExistingTransaction(toId(nextFile.getResourceId()), new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());

}

@@ -344,7 +340,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
if (useCache) {
Date cutoff = DateUtils.addMilliseconds(new Date(), -myReuseBulkExportForMillis);
Pageable page = PageRequest.of(0, 10);
-Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkJobStatusEnum.ERROR);
+Slice<BulkExportJobEntity> existing = myBulkExportJobDao.findExistingJob(page, request, cutoff, BulkExportJobStatusEnum.ERROR);
if (!existing.isEmpty()) {
return toSubmittedJobInfo(existing.iterator().next());
}

@@ -373,7 +369,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

BulkExportJobEntity job = new BulkExportJobEntity();
job.setJobId(UUID.randomUUID().toString());
-job.setStatus(BulkJobStatusEnum.SUBMITTED);
+job.setStatus(BulkExportJobStatusEnum.SUBMITTED);
job.setSince(since);
job.setCreated(new Date());
job.setRequest(request);

@@ -445,7 +441,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
retVal.setStatusMessage(job.getStatusMessage());
retVal.setRequest(job.getRequest());

-if (job.getStatus() == BulkJobStatusEnum.COMPLETE) {
+if (job.getStatus() == BulkExportJobStatusEnum.COMPLETE) {
for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
retVal.addFile()

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.svc;
+package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@@ -1,4 +1,4 @@
-package ca.uhn.fhir.jpa.bulk.svc;
+package ca.uhn.fhir.jpa.bulk.export.svc;

/*-
 * #%L

@@ -20,9 +20,7 @@ package ca.uhn.fhir.jpa.bulk.svc;
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;

@@ -84,12 +82,12 @@ public class BulkExportDaoSvc {
}

@Transactional
-public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus) {
+public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus) {
setJobToStatus(theJobUUID, theStatus, null);
}

@Transactional
-public void setJobToStatus(String theJobUUID, BulkJobStatusEnum theStatus, String theStatusMessage) {
+public void setJobToStatus(String theJobUUID, BulkExportJobStatusEnum theStatus, String theStatusMessage) {
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(theJobUUID);
if (!oJob.isPresent()) {
ourLog.error("Job with UUID {} doesn't exist!", theJobUUID);
93  hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/api/IBulkDataImportSvc.java  Normal file
@ -0,0 +1,93 @@
|
||||
package ca.uhn.fhir.jpa.bulk.imprt.api;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.List;
|
||||
|
||||
public interface IBulkDataImportSvc {
|
||||
|
||||
/**
|
||||
* Create a new job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state (meaning it won't yet be worked on and can be added to)
|
||||
*/
|
||||
String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles);
|
||||
|
||||
/**
|
||||
* Add more files to a job in {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING} state
|
||||
*
|
||||
* @param theJobId The job ID
|
||||
* @param theFiles The files to add to the job
|
||||
*/
|
||||
void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles);
|
||||
|
||||
/**
|
||||
* Move a job from {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#STAGING STAGING}
|
||||
* state to {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
|
||||
* state, meaning that is is a candidate to be picked up for processing
|
||||
*
|
||||
* @param theJobId The job ID
|
||||
*/
|
||||
void markJobAsReadyForActivation(String theJobId);
|
||||
|
||||
/**
|
||||
* This method is intended to be called from the job scheduler, and will begin execution on
|
||||
* the next job in status {@link ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum#READY READY}
|
||||
*
|
||||
* @return Returns {@literal true} if a job was activated
|
||||
*/
|
||||
boolean activateNextReadyJob();
|
||||
|
||||
/**
|
||||
* Updates the job status for the given job
|
||||
*/
|
||||
void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus);
|
||||
|
||||
/**
|
||||
* Updates the job status for the given job
|
||||
*/
|
||||
void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage);
|
||||
|
||||
/**
|
||||
* Gets the number of files available for a given Job ID
|
||||
*
|
||||
* @param theJobId The job ID
|
||||
* @return The file count
|
||||
*/
|
||||
BulkImportJobJson fetchJob(String theJobId);
|
||||
|
||||
/**
|
||||
* Fetch a given file by job ID
|
||||
*
|
||||
* @param theJobId The job ID
|
||||
* @param theFileIndex The index of the file within the job
|
||||
* @return The file
|
||||
*/
|
||||
BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex);
|
||||
|
||||
/**
|
||||
* Delete all input files associated with a particular job
|
||||
*/
|
||||
void deleteJobFiles(String theJobId);
|
||||
}
|
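Taken together, these methods define a staged lifecycle: a job is created in STAGING, files can be appended while it remains in STAGING, and marking it READY hands it off to the activation scheduler. A minimal caller might look like the following sketch, assuming a wired IBulkDataImportSvc bean; the tenant name, batch size, description, and NDJSON payload are all illustrative:

import java.util.Collections;

import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;

public class BulkImportSubmitter {

	// Hypothetical helper: stages one NDJSON file and releases the job for processing.
	public String submit(IBulkDataImportSvc theSvc, String theNdjson) {
		BulkImportJobJson job = new BulkImportJobJson()
			.setProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION)
			.setBatchSize(100) // rows committed per chunk
			.setJobDescription("example load"); // illustrative description
		BulkImportJobFileJson file = new BulkImportJobFileJson()
			.setTenantName("DEFAULT") // illustrative tenant
			.setContents(theNdjson);
		String jobId = theSvc.createNewJob(job, Collections.singletonList(file)); // job is now STAGING
		theSvc.markJobAsReadyForActivation(jobId); // job is now READY; the scheduler picks it up
		return jobId;
	}
}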
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java (new file, 51 lines)
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Runs before the job's processing step begins, moving the job into RUNNING status.
 */
public class ActivateBulkImportEntityStepListener implements StepExecutionListener {

	@Autowired
	private IBulkDataImportSvc myBulkImportDaoSvc;

	@Override
	public void beforeStep(StepExecution theStepExecution) {
		String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
		if (jobUuid != null) {
			myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
		}
	}

	@Override
	public ExitStatus afterStep(StepExecution theStepExecution) {
		return ExitStatus.EXECUTING;
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java (new file, 76 lines)
@@ -0,0 +1,76 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.util.IoUtil;
import com.google.common.io.LineReader;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.io.StringReader;

@SuppressWarnings("UnstableApiUsage")
public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord> {

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;
	@Autowired
	private FhirContext myFhirContext;
	@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
	private String myJobUuid;
	@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
	private int myFileIndex;

	private StringReader myReader;
	private LineReader myLineReader;
	private int myLineIndex;
	private String myTenantName;

	@Override
	public ParsedBulkImportRecord read() throws Exception {

		if (myReader == null) {
			BulkImportJobFileJson file = myBulkDataImportSvc.fetchFile(myJobUuid, myFileIndex);
			myTenantName = file.getTenantName();
			myReader = new StringReader(file.getContents());
			myLineReader = new LineReader(myReader);
		}

		String nextLine = myLineReader.readLine();
		if (nextLine == null) {
			IoUtil.closeQuietly(myReader);
			return null;
		}

		Logs.getBatchTroubleshootingLog().debug("Reading line {} file index {} for job: {}", myLineIndex++, myFileIndex, myJobUuid);

		IBaseResource parsed = myFhirContext.newJsonParser().parseResource(nextLine);
		return new ParsedBulkImportRecord(myTenantName, parsed);
	}
}
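The reader treats each staged file as NDJSON: it lazily loads the file on the first read() call, then parses one standalone JSON resource per line until the lines run out and returns null to end the step. A minimal sketch of that per-line parsing contract, assuming an R4 context; the Patient line is illustrative:

import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class NdjsonLineDemo {
	public static void main(String[] args) {
		// One line of a bulk import file: a complete JSON resource with no line breaks.
		String line = "{\"resourceType\":\"Patient\",\"id\":\"p1\"}";
		FhirContext ctx = FhirContext.forR4(); // assuming an R4 server
		IBaseResource parsed = ctx.newJsonParser().parseResource(line);
		System.out.println(parsed.fhirType()); // prints "Patient"
	}
}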
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java (new file, 74 lines)
@@ -0,0 +1,74 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.List;

public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {

	private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
	@Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
	private String myJobUuid;
	@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
	private int myFileIndex;
	@Value("#{stepExecutionContext['" + BulkImportPartitioner.ROW_PROCESSING_MODE + "']}")
	private JobFileRowProcessingModeEnum myRowProcessingMode;
	@Autowired
	private DaoRegistry myDaoRegistry;

	@SuppressWarnings({"SwitchStatementWithTooFewBranches", "rawtypes", "unchecked"})
	@Override
	public void write(List<? extends ParsedBulkImportRecord> theItemLists) throws Exception {
		ourLog.info("Beginning bulk import write {} chunks Job[{}] FileIndex[{}]", theItemLists.size(), myJobUuid, myFileIndex);

		for (ParsedBulkImportRecord nextItem : theItemLists) {

			SystemRequestDetails requestDetails = new SystemRequestDetails();
			requestDetails.setTenantId(nextItem.getTenantName());

			// Yeah this is a lame switch - We'll add more later I swear
			switch (myRowProcessingMode) {
				default:
				case FHIR_TRANSACTION:
					IFhirSystemDao systemDao = myDaoRegistry.getSystemDao();
					IBaseResource inputBundle = nextItem.getRowContent();
					systemDao.transactionNested(requestDetails, inputBundle);
					break;
			}

		}

	}

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java (new file, 57 lines)
@@ -0,0 +1,57 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

/**
 * Runs as the final step of a job, setting the job status to COMPLETE or ERROR as appropriate and deleting the staged input files.
 */
public class BulkImportJobCloser implements Tasklet {

	@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
	private String myJobUUID;

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	@Override
	public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) {
		BatchStatus executionStatus = theChunkContext.getStepContext().getStepExecution().getJobExecution().getStatus();
		if (executionStatus == BatchStatus.STARTED) {
			myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.COMPLETE);
			myBulkDataImportSvc.deleteJobFiles(myJobUUID);
		} else {
			myBulkDataImportSvc.setJobToStatus(myJobUUID, BulkImportJobStatusEnum.ERROR, "Found job in status: " + executionStatus);
			myBulkDataImportSvc.deleteJobFiles(myJobUUID);
		}
		return RepeatStatus.FINISHED;
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java (new file, 169 lines)
@@ -0,0 +1,169 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.partition.PartitionHandler;
import org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.repeat.CompletionPolicy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;

import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;

/**
 * Spring batch Job configuration file. Contains all necessary plumbing to run a
 * Bulk Import job.
 */
@Configuration
public class BulkImportJobConfig {

	public static final String JOB_PARAM_COMMIT_INTERVAL = "commitInterval";

	@Autowired
	private StepBuilderFactory myStepBuilderFactory;

	@Autowired
	private JobBuilderFactory myJobBuilderFactory;

	@Autowired
	@Qualifier(BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
	private TaskExecutor myTaskExecutor;

	@Bean(name = BULK_IMPORT_JOB_NAME)
	@Lazy
	public Job bulkImportJob() throws Exception {
		return myJobBuilderFactory.get(BULK_IMPORT_JOB_NAME)
			.validator(bulkImportJobParameterValidator())
			.start(bulkImportPartitionStep())
			.next(bulkImportCloseJobStep())
			.build();
	}

	@Bean
	public JobParametersValidator bulkImportJobParameterValidator() {
		return new BulkImportJobParameterValidator();
	}

	@Bean
	public CreateBulkImportEntityTasklet createBulkImportEntityTasklet() {
		return new CreateBulkImportEntityTasklet();
	}

	@Bean
	@JobScope
	public ActivateBulkImportEntityStepListener activateBulkImportEntityStepListener() {
		return new ActivateBulkImportEntityStepListener();
	}

	@Bean
	public Step bulkImportPartitionStep() throws Exception {
		return myStepBuilderFactory.get("bulkImportPartitionStep")
			.partitioner("bulkImportPartitionStep", bulkImportPartitioner())
			.partitionHandler(partitionHandler())
			.listener(activateBulkImportEntityStepListener())
			.gridSize(10)
			.build();
	}

	private PartitionHandler partitionHandler() throws Exception {
		assert myTaskExecutor != null;

		TaskExecutorPartitionHandler retVal = new TaskExecutorPartitionHandler();
		retVal.setStep(bulkImportProcessFilesStep());
		retVal.setTaskExecutor(myTaskExecutor);
		retVal.afterPropertiesSet();
		return retVal;
	}

	@Bean
	public Step bulkImportCloseJobStep() {
		return myStepBuilderFactory.get("bulkImportCloseJobStep")
			.tasklet(bulkImportJobCloser())
			.build();
	}

	@Bean
	@JobScope
	public BulkImportJobCloser bulkImportJobCloser() {
		return new BulkImportJobCloser();
	}

	@Bean
	@JobScope
	public BulkImportPartitioner bulkImportPartitioner() {
		return new BulkImportPartitioner();
	}

	@Bean
	public Step bulkImportProcessFilesStep() {
		CompletionPolicy completionPolicy = completionPolicy();

		return myStepBuilderFactory.get("bulkImportProcessFilesStep")
			.<ParsedBulkImportRecord, ParsedBulkImportRecord>chunk(completionPolicy)
			.reader(bulkImportFileReader())
			.writer(bulkImportFileWriter())
			.listener(bulkImportStepListener())
			.listener(completionPolicy)
			.build();
	}

	@Bean
	@StepScope
	public CompletionPolicy completionPolicy() {
		return new BulkImportProcessStepCompletionPolicy();
	}

	@Bean
	@StepScope
	public ItemWriter<ParsedBulkImportRecord> bulkImportFileWriter() {
		return new BulkImportFileWriter();
	}

	@Bean
	@StepScope
	public BulkImportFileReader bulkImportFileReader() {
		return new BulkImportFileReader();
	}

	@Bean
	@StepScope
	public BulkImportStepListener bulkImportStepListener() {
		return new BulkImportStepListener();
	}

}
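The job is therefore a two-step pipeline: a partitioned file-processing step fanned out over the shared task executor (grid size 10), followed by the closing tasklet. The parameters it consumes are the persisted job UUID (checked by the validator below) and the chunk commit interval; a sketch of assembling them, with an illustrative UUID:

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

public class BulkImportParametersDemo {
	public static void main(String[] args) {
		// "jobUUID" and "commitInterval" are the two parameters this job reads;
		// the UUID value here is illustrative.
		JobParameters params = new JobParametersBuilder()
			.addString("jobUUID", "123e4567-e89b-12d3-a456-426614174000")
			.addLong("commitInterval", 100L)
			.toJobParameters();
		System.out.println(params);
	}
}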
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java (new file, 70 lines)
@@ -0,0 +1,70 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.Optional;

/**
 * This class will prevent a job from running if the UUID does not exist or is invalid.
 */
public class BulkImportJobParameterValidator implements JobParametersValidator {

	@Autowired
	private IBulkImportJobDao myBulkImportJobDao;
	@Autowired
	private PlatformTransactionManager myTransactionManager;

	@Override
	public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
		if (theJobParameters == null) {
			throw new JobParametersInvalidException("This job needs Parameters: [jobUUID]");
		}

		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
		String errorMessage = txTemplate.execute(tx -> {
			StringBuilder errorBuilder = new StringBuilder();
			String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
			Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
			if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
				errorBuilder.append("There is no persisted job that exists with UUID: ");
				errorBuilder.append(jobUUID);
				errorBuilder.append(". ");
			}

			return errorBuilder.toString();
		});

		if (!StringUtils.isEmpty(errorMessage)) {
			throw new JobParametersInvalidException(errorMessage);
		}
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java (new file, 72 lines)
@@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.Map;

import static org.slf4j.LoggerFactory.getLogger;

public class BulkImportPartitioner implements Partitioner {
	public static final String FILE_INDEX = "fileIndex";
	public static final String ROW_PROCESSING_MODE = "rowProcessingMode";

	private static final Logger ourLog = getLogger(BulkImportPartitioner.class);

	@Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
	private String myJobUUID;

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	@Nonnull
	@Override
	public Map<String, ExecutionContext> partition(int gridSize) {
		Map<String, ExecutionContext> retVal = new HashMap<>();

		BulkImportJobJson job = myBulkDataImportSvc.fetchJob(myJobUUID);

		for (int i = 0; i < job.getFileCount(); i++) {

			ExecutionContext context = new ExecutionContext();
			context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
			context.putInt(FILE_INDEX, i);
			context.put(ROW_PROCESSING_MODE, job.getProcessingMode());

			String key = "FILE" + i;
			retVal.put(key, context);
		}

		return retVal;
	}

}
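Each staged file becomes one partition, so files can be processed in parallel by separate step executions. For a hypothetical three-file job, the fan-out looks roughly like this:

public class PartitionKeyDemo {
	public static void main(String[] args) {
		// Illustrative only: one execution context per file, keyed FILE0..FILE2,
		// each carrying the job UUID, its file index, and the row processing mode.
		for (int i = 0; i < 3; i++) {
			System.out.println("FILE" + i + " -> {jobUUID=<uuid>, fileIndex=" + i + ", rowProcessingMode=FHIR_TRANSACTION}");
		}
	}
}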
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportProcessStepCompletionPolicy.java (new file, 41 lines)
@@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.repeat.RepeatContext;
import org.springframework.batch.repeat.policy.CompletionPolicySupport;
import org.springframework.beans.factory.annotation.Value;

import static ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL;

public class BulkImportProcessStepCompletionPolicy extends CompletionPolicySupport {

	@Value("#{jobParameters['" + JOB_PARAM_COMMIT_INTERVAL + "']}")
	private int myChunkSize;

	@Override
	public boolean isComplete(RepeatContext context) {
		if (context.getStartedCount() < myChunkSize) {
			return false;
		}
		return true;
	}
}
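The policy closes a chunk once the configured commitInterval rows have been started, so a database commit happens roughly every commitInterval rows (the final chunk of a file can be shorter, since the reader returns null at end of file). For example, with a commitInterval of 100, a 250-row file would commit in chunks of 100, 100, and 50; a toy illustration of that arithmetic:

public class ChunkMath {
	public static void main(String[] args) {
		// Illustrative values: 250 rows in a file, committing every 100 rows.
		int rows = 250;
		int commitInterval = 100;
		for (int done = 0; done < rows; done += commitInterval) {
			System.out.println("chunk of " + Math.min(commitInterval, rows - done));
		}
	}
}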
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java (new file, 63 lines)
@@ -0,0 +1,63 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * This class sets the job status to ERROR if any failures occur while actually
 * processing the import files.
 */
public class BulkImportStepListener implements StepExecutionListener {

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	@Override
	public void beforeStep(@Nonnull StepExecution stepExecution) {
		// nothing
	}

	@Override
	public ExitStatus afterStep(StepExecution theStepExecution) {
		if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
			// Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
			String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
			if (jobUuid == null) {
				jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
			}
			assert isNotBlank(jobUuid);
			String exitDescription = theStepExecution.getExitStatus().getExitDescription();
			myBulkDataImportSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.ERROR, exitDescription);
		}
		return theStepExecution.getExitStatus();
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java (new file, 45 lines)
@@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;

import java.util.Map;

public class CreateBulkImportEntityTasklet implements Tasklet {

	@Override
	public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception {
		Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();

		// Bulk import jobs are always pre-created, so a job UUID parameter is required here.
		ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
		CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
		return RepeatStatus.FINISHED;
	}

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobFileJson.java (new file, 51 lines)
@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;

public class BulkImportJobFileJson implements IModelJson {

	@JsonProperty("tenantName")
	private String myTenantName;
	@JsonProperty("contents")
	private String myContents;

	public String getTenantName() {
		return myTenantName;
	}

	public BulkImportJobFileJson setTenantName(String theTenantName) {
		myTenantName = theTenantName;
		return this;
	}

	public String getContents() {
		return myContents;
	}

	public BulkImportJobFileJson setContents(String theContents) {
		myContents = theContents;
		return this;
	}

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobJson.java (new file, 72 lines)
@@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.model.api.IModelJson;
import com.fasterxml.jackson.annotation.JsonProperty;

public class BulkImportJobJson implements IModelJson {

	@JsonProperty("processingMode")
	private JobFileRowProcessingModeEnum myProcessingMode;
	@JsonProperty("jobDescription")
	private String myJobDescription;
	@JsonProperty("fileCount")
	private int myFileCount;
	@JsonProperty("batchSize")
	private int myBatchSize;

	public String getJobDescription() {
		return myJobDescription;
	}

	public BulkImportJobJson setJobDescription(String theJobDescription) {
		myJobDescription = theJobDescription;
		return this;
	}

	public JobFileRowProcessingModeEnum getProcessingMode() {
		return myProcessingMode;
	}

	public BulkImportJobJson setProcessingMode(JobFileRowProcessingModeEnum theProcessingMode) {
		myProcessingMode = theProcessingMode;
		return this;
	}

	public int getFileCount() {
		return myFileCount;
	}

	public BulkImportJobJson setFileCount(int theFileCount) {
		myFileCount = theFileCount;
		return this;
	}

	public int getBatchSize() {
		return myBatchSize;
	}

	public BulkImportJobJson setBatchSize(int theBatchSize) {
		myBatchSize = theBatchSize;
		return this;
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/BulkImportJobStatusEnum.java (new file, 34 lines)
@@ -0,0 +1,34 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.fasterxml.jackson.annotation.JsonFormat;

@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum BulkImportJobStatusEnum {

	STAGING,
	READY,
	RUNNING,
	COMPLETE,
	ERROR

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/JobFileRowProcessingModeEnum.java (new file, 34 lines)
@@ -0,0 +1,34 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.fasterxml.jackson.annotation.JsonFormat;

@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum JobFileRowProcessingModeEnum {

	/**
	 * Sorting OK
	 */
	FHIR_TRANSACTION

}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/model/ParsedBulkImportRecord.java (new file, 46 lines)
@@ -0,0 +1,46 @@
package ca.uhn.fhir.jpa.bulk.imprt.model;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.hl7.fhir.instance.model.api.IBaseResource;

import java.io.Serializable;

public class ParsedBulkImportRecord implements Serializable {

	private static final long serialVersionUID = 1L;

	private final String myTenantName;
	private final IBaseResource myRowContent;

	public ParsedBulkImportRecord(String theTenantName, IBaseResource theRowContent) {
		myTenantName = theTenantName;
		myRowContent = theRowContent;
	}

	public String getTenantName() {
		return myTenantName;
	}

	public IBaseResource getRowContent() {
		return myRowContent;
	}
}
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java (new file, 280 lines)
@@ -0,0 +1,280 @@
package ca.uhn.fhir.jpa.bulk.imprt.svc;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobFileDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
	private static final Logger ourLog = LoggerFactory.getLogger(BulkDataImportSvcImpl.class);
	@Autowired
	private IBulkImportJobDao myJobDao;

	@Autowired
	private IBulkImportJobFileDao myJobFileDao;
	@Autowired
	private PlatformTransactionManager myTxManager;
	private TransactionTemplate myTxTemplate;
	@Autowired
	private ISchedulerService mySchedulerService;
	@Autowired
	private IBatchJobSubmitter myJobSubmitter;
	@Autowired
	@Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME)
	private org.springframework.batch.core.Job myBulkImportJob;

	@PostConstruct
	public void start() {
		myTxTemplate = new TransactionTemplate(myTxManager);

		ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
		jobDetail.setId(ActivationJob.class.getName());
		jobDetail.setJobClass(ActivationJob.class);
		mySchedulerService.scheduleClusteredJob(10 * DateUtils.MILLIS_PER_SECOND, jobDetail);
	}

	@Override
	@Transactional
	public String createNewJob(BulkImportJobJson theJobDescription, @Nonnull List<BulkImportJobFileJson> theInitialFiles) {
		ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription, "Job must not be null");
		ValidateUtil.isNotNullOrThrowUnprocessableEntity(theJobDescription.getProcessingMode(), "Job File Processing mode must not be null");
		ValidateUtil.isTrueOrThrowInvalidRequest(theJobDescription.getBatchSize() > 0, "Job File Batch Size must be > 0");

		String jobId = UUID.randomUUID().toString();

		ourLog.info("Creating new Bulk Import job with {} files, assigning job ID: {}", theInitialFiles.size(), jobId);

		BulkImportJobEntity job = new BulkImportJobEntity();
		job.setJobId(jobId);
		job.setFileCount(theInitialFiles.size());
		job.setStatus(BulkImportJobStatusEnum.STAGING);
		job.setJobDescription(theJobDescription.getJobDescription());
		job.setBatchSize(theJobDescription.getBatchSize());
		job.setRowProcessingMode(theJobDescription.getProcessingMode());
		job = myJobDao.save(job);

		int nextSequence = 0;
		addFilesToJob(theInitialFiles, job, nextSequence);

		return jobId;
	}

	@Override
	@Transactional
	public void addFilesToJob(String theJobId, List<BulkImportJobFileJson> theFiles) {
		ourLog.info("Adding {} files to bulk import job: {}", theFiles.size(), theJobId);

		BulkImportJobEntity job = findJobByJobId(theJobId);

		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Job %s has status %s and can not be added to", theJobId, job.getStatus());

		addFilesToJob(theFiles, job, job.getFileCount());

		job.setFileCount(job.getFileCount() + theFiles.size());
		myJobDao.save(job);
	}

	private BulkImportJobEntity findJobByJobId(String theJobId) {
		BulkImportJobEntity job = myJobDao
			.findByJobId(theJobId)
			.orElseThrow(() -> new InvalidRequestException("Unknown job ID: " + theJobId));
		return job;
	}

	@Override
	@Transactional
	public void markJobAsReadyForActivation(String theJobId) {
		ourLog.info("Activating bulk import job {}", theJobId);

		BulkImportJobEntity job = findJobByJobId(theJobId);
		ValidateUtil.isTrueOrThrowInvalidRequest(job.getStatus() == BulkImportJobStatusEnum.STAGING, "Bulk import job %s can not be activated in status: %s", theJobId, job.getStatus());

		job.setStatus(BulkImportJobStatusEnum.READY);
		myJobDao.save(job);
	}

	/**
	 * To be called by the job scheduler
	 */
	@Transactional(value = Transactional.TxType.NEVER)
	@Override
	public boolean activateNextReadyJob() {

		Optional<BulkImportJobEntity> jobToProcessOpt = Objects.requireNonNull(myTxTemplate.execute(t -> {
			Pageable page = PageRequest.of(0, 1);
			Slice<BulkImportJobEntity> submittedJobs = myJobDao.findByStatus(page, BulkImportJobStatusEnum.READY);
			if (submittedJobs.isEmpty()) {
				return Optional.empty();
			}
			return Optional.of(submittedJobs.getContent().get(0));
		}));

		if (!jobToProcessOpt.isPresent()) {
			return false;
		}

		BulkImportJobEntity bulkImportJobEntity = jobToProcessOpt.get();

		String jobUuid = bulkImportJobEntity.getJobId();
		try {
			processJob(bulkImportJobEntity);
		} catch (Exception e) {
			ourLog.error("Failure while preparing bulk import job", e);
			myTxTemplate.execute(t -> {
				Optional<BulkImportJobEntity> submittedJobs = myJobDao.findByJobId(jobUuid);
				if (submittedJobs.isPresent()) {
					BulkImportJobEntity jobEntity = submittedJobs.get();
					jobEntity.setStatus(BulkImportJobStatusEnum.ERROR);
					jobEntity.setStatusMessage(e.getMessage());
					myJobDao.save(jobEntity);
				}
				return false;
			});
		}

		return true;
	}

	@Override
	@Transactional
	public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus) {
		setJobToStatus(theJobId, theStatus, null);
	}

	@Override
	public void setJobToStatus(String theJobId, BulkImportJobStatusEnum theStatus, String theStatusMessage) {
		BulkImportJobEntity job = findJobByJobId(theJobId);
		job.setStatus(theStatus);
		job.setStatusMessage(theStatusMessage);
		myJobDao.save(job);
	}

	@Override
	@Transactional
	public BulkImportJobJson fetchJob(String theJobId) {
		BulkImportJobEntity job = findJobByJobId(theJobId);
		return job.toJson();
	}

	@Transactional
	@Override
	public BulkImportJobFileJson fetchFile(String theJobId, int theFileIndex) {
		BulkImportJobEntity job = findJobByJobId(theJobId);

		return myJobFileDao
			.findForJob(job, theFileIndex)
			.map(t -> t.toJson())
			.orElseThrow(() -> new IllegalArgumentException("Invalid index " + theFileIndex + " for job " + theJobId));
	}

	@Override
	@Transactional
	public void deleteJobFiles(String theJobId) {
		BulkImportJobEntity job = findJobByJobId(theJobId);
		List<Long> files = myJobFileDao.findAllIdsForJob(theJobId);
		for (Long next : files) {
			myJobFileDao.deleteById(next);
		}
		myJobDao.delete(job);
	}

	private void processJob(BulkImportJobEntity theBulkExportJobEntity) throws JobParametersInvalidException {
		String jobId = theBulkExportJobEntity.getJobId();
		int batchSize = theBulkExportJobEntity.getBatchSize();
		ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive");

		JobParametersBuilder parameters = new JobParametersBuilder()
			.addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId)
			.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);

		if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {
			parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription());
		}

		ourLog.info("Submitting bulk import job {} to job scheduler", jobId);

		myJobSubmitter.runJob(myBulkImportJob, parameters.toJobParameters());
	}

	private void addFilesToJob(@Nonnull List<BulkImportJobFileJson> theInitialFiles, BulkImportJobEntity job, int nextSequence) {
		for (BulkImportJobFileJson nextFile : theInitialFiles) {
			ValidateUtil.isNotBlankOrThrowUnprocessableEntity(nextFile.getContents(), "Job file contents must not be null");

			BulkImportJobFileEntity jobFile = new BulkImportJobFileEntity();
			jobFile.setJob(job);
			jobFile.setContents(nextFile.getContents());
			jobFile.setTenantName(nextFile.getTenantName());
			jobFile.setFileSequence(nextSequence++);
			myJobFileDao.save(jobFile);
		}
	}


	public static class ActivationJob implements HapiJob {
		@Autowired
		private IBulkDataImportSvc myTarget;

		@Override
		public void execute(JobExecutionContext theContext) {
			myTarget.activateNextReadyJob();
		}
	}

}
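Activation is driven by the clustered ActivationJob, which polls every 10 seconds and submits at most one READY job per tick; a failure during submission flips the job to ERROR with the exception message as its status message. In a test one might drain the READY queue manually instead of waiting for the poll; the helper below is hypothetical, assuming a wired service:

import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;

public class ManualActivationDemo {
	// Hypothetical test helper: activates READY jobs one at a time until none remain,
	// mirroring what the scheduled ActivationJob would do across several ticks.
	public static int activateAll(IBulkDataImportSvc theSvc) {
		int count = 0;
		while (theSvc.activateNextReadyJob()) {
			count++;
		}
		return count;
	}
}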
@@ -11,15 +11,18 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.binstore.BinaryAccessProvider;
import ca.uhn.fhir.jpa.binstore.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.svc.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
@@ -29,6 +32,7 @@ import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
@@ -63,7 +67,6 @@ import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.interceptor.MdmSearchExpandingInterceptor;
import ca.uhn.fhir.jpa.interceptor.OverridePathBasedReferentialIntegrityForDeletesInterceptor;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.packages.IHapiPackageCacheManager;
@@ -95,8 +98,8 @@ import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ForcedIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
@@ -129,6 +132,7 @@ import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.validation.JpaResourceLoader;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.ResponseTerminologyTranslationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import org.hibernate.jpa.HibernatePersistenceProvider;
@@ -160,6 +164,7 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.concurrent.RejectedExecutionHandler;
/*
 * #%L
@@ -185,7 +190,7 @@ import java.util.Date;
@Configuration
@EnableJpaRepositories(basePackages = "ca.uhn.fhir.jpa.dao.data")
@Import({
	SearchParamConfig.class, BatchJobsConfig.class
})
@EnableBatchProcessing
public abstract class BaseConfig {
@@ -199,24 +204,23 @@ public abstract class BaseConfig {
	public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER = "PersistedJpaSearchFirstPageBundleProvider";
	public static final String SEARCH_BUILDER = "SearchBuilder";
	public static final String HISTORY_BUILDER = "HistoryBuilder";
	private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
	public static final String REPOSITORY_VALIDATING_RULE_BUILDER = "repositoryValidatingRuleBuilder";

	private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
	@Autowired
	protected Environment myEnv;

	@Autowired
	private DaoRegistry myDaoRegistry;
	private Integer searchCoordCorePoolSize = 20;
	private Integer searchCoordMaxPoolSize = 100;
	private Integer searchCoordQueueCapacity = 200;

	/**
	 * Subclasses may override this method to provide settings such as search coordinator pool sizes.
	 */
	@PostConstruct
	public void initSettings() {}

	private Integer searchCoordCorePoolSize = 20;
	private Integer searchCoordMaxPoolSize = 100;
	private Integer searchCoordQueueCapacity = 200;
	public void initSettings() {
	}

	public void setSearchCoordCorePoolSize(Integer searchCoordCorePoolSize) {
		this.searchCoordCorePoolSize = searchCoordCorePoolSize;
@@ -297,6 +301,11 @@ public abstract class BaseConfig {
		return new SubscriptionTriggeringProvider();
	}

	@Bean
	public TransactionProcessor transactionProcessor() {
		return new TransactionProcessor();
	}

	@Bean(name = "myAttachmentBinaryAccessProvider")
	@Lazy
	public BinaryAccessProvider binaryAccessProvider() {
@@ -381,13 +390,15 @@ public abstract class BaseConfig {
		return retVal;
	}

	@Bean
	@Bean(name= BatchConstants.JOB_LAUNCHING_TASK_EXECUTOR)
	public TaskExecutor jobLaunchingTaskExecutor() {
		ThreadPoolTaskExecutor asyncTaskExecutor = new ThreadPoolTaskExecutor();
		asyncTaskExecutor.setCorePoolSize(5);
		asyncTaskExecutor.setCorePoolSize(0);
		asyncTaskExecutor.setMaxPoolSize(10);
		asyncTaskExecutor.setQueueCapacity(500);
		asyncTaskExecutor.setQueueCapacity(0);
		asyncTaskExecutor.setAllowCoreThreadTimeOut(true);
		asyncTaskExecutor.setThreadNamePrefix("JobLauncher-");
		asyncTaskExecutor.setRejectedExecutionHandler(new ResourceReindexingSvcImpl.BlockPolicy());
		asyncTaskExecutor.initialize();
		return asyncTaskExecutor;
	}
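Note: the reworked executor above uses corePoolSize 0, queueCapacity 0 and allowCoreThreadTimeOut(true), so every launched job gets a thread immediately (up to maxPoolSize 10), idle threads are reclaimed, and once all ten are busy the rejection handler makes submitters wait rather than fail. A minimal sketch of such a blocking handler, assuming ResourceReindexingSvcImpl.BlockPolicy behaves along these lines (this is not its actual source):

	import java.util.concurrent.RejectedExecutionException;
	import java.util.concurrent.RejectedExecutionHandler;
	import java.util.concurrent.ThreadPoolExecutor;

	public class BlockPolicy implements RejectedExecutionHandler {
		@Override
		public void rejectedExecution(Runnable theRunnable, ThreadPoolExecutor theExecutor) {
			try {
				// With queueCapacity == 0 the pool uses a SynchronousQueue, so put()
				// blocks the submitting thread until a worker is free to take the task.
				theExecutor.getQueue().put(theRunnable);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				throw new RejectedExecutionException("Interrupted while waiting to submit task", e);
			}
		}
	}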
@@ -514,6 +525,11 @@ public abstract class BaseConfig {
		return new BulkDataExportProvider();
	}

	@Bean
	@Lazy
	public IBulkDataImportSvc bulkDataImportSvc() {
		return new BulkDataImportSvcImpl();
	}

	@Bean
	public PersistedJpaBundleProviderFactory persistedJpaBundleProviderFactory() {
@@ -614,7 +630,7 @@ public abstract class BaseConfig {
	public QuantityNormalizedPredicateBuilder newQuantityNormalizedPredicateBuilder(SearchQueryBuilder theSearchBuilder) {
		return new QuantityNormalizedPredicateBuilder(theSearchBuilder);
	}


	@Bean
	@Scope("prototype")
	public ResourceLinkPredicateBuilder newResourceLinkPredicateBuilder(QueryStack theQueryStack, SearchQueryBuilder theSearchBuilder, boolean theReversed) {
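Note: the initSettings() hook above exists so that subclasses can tune the search coordinator pool before the beans are built. A hedged sketch of such an override; only setSearchCoordCorePoolSize is visible in this hunk, and the other two setters are assumed to follow the same pattern:

	// Hypothetical subclass configuration
	@Configuration
	public class MyServerConfig extends BaseR4Config {
		@Override
		public void initSettings() {
			setSearchCoordCorePoolSize(40);
			setSearchCoordMaxPoolSize(200);   // assumed setter
			setSearchCoordQueueCapacity(500); // assumed setter
		}
		// remaining abstract bean methods omitted for brevity
	}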
@@ -7,6 +7,9 @@ import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.TransactionProcessorVersionAdapterDstu2;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.term.TermReadSvcDstu2;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
@@ -93,6 +96,12 @@ public class BaseDstu2Config extends BaseConfig {
		return retVal;
	}

	@Bean
	public TransactionProcessor.ITransactionProcessorVersionAdapter transactionProcessorVersionFacade() {
		return new TransactionProcessorVersionAdapterDstu2();
	}

	@Bean(name = "myDefaultProfileValidationSupport")
	public DefaultProfileValidationSupport defaultProfileValidationSupport() {
		return new DefaultProfileValidationSupport(fhirContext());
@@ -87,11 +87,6 @@ public class BaseDstu3Config extends BaseConfigDstu3Plus {
		return new TransactionProcessorVersionAdapterDstu3();
	}

	@Bean
	public TransactionProcessor transactionProcessor() {
		return new TransactionProcessor();
	}

	@Bean(name = "myResourceCountsCache")
	public ResourceCountCache resourceCountsCache() {
		ResourceCountCache retVal = new ResourceCountCache(() -> systemDaoDstu3().getResourceCounts());
@@ -82,11 +82,6 @@ public class BaseR4Config extends BaseConfigDstu3Plus {
		return new TransactionProcessorVersionAdapterR4();
	}

	@Bean
	public TransactionProcessor transactionProcessor() {
		return new TransactionProcessor();
	}

	@Bean(name = GRAPHQL_PROVIDER_NAME)
	@Lazy
	public GraphQLProvider graphQLProvider() {
@@ -80,11 +80,6 @@ public class BaseR5Config extends BaseConfigDstu3Plus {
		return new TransactionProcessorVersionAdapterR5();
	}

	@Bean
	public TransactionProcessor transactionProcessor() {
		return new TransactionProcessor();
	}

	@Bean(name = GRAPHQL_PROVIDER_NAME)
	@Lazy
	public GraphQLProvider graphQLProvider() {
@@ -3,13 +3,14 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -17,6 +18,7 @@ import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -42,14 +44,26 @@ import java.util.Map;
 * #L%
 */

public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> {
public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends BaseHapiFhirDao<IBaseResource> implements IFhirSystemDao<T, MT> {

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirSystemDao.class);
	@Autowired
	@Qualifier("myResourceCountsCache")
	public ResourceCountCache myResourceCountsCache;
	@Autowired
	private PartitionSettings myPartitionSettings;
	private TransactionProcessor myTransactionProcessor;

	@VisibleForTesting
	public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
		myTransactionProcessor = theTransactionProcessor;
	}

	@Override
	@PostConstruct
	public void start() {
		super.start();
		myTransactionProcessor.setDao(this);
	}

	@Override
	@Transactional(propagation = Propagation.NEVER)
@@ -91,6 +105,18 @@ public abstract class BaseHapiFhirSystemDao<T, MT> extends BaseHapiFhirDao<IBase
		return retVal;
	}

	@Override
	@Transactional(propagation = Propagation.NEVER)
	public T transaction(RequestDetails theRequestDetails, T theRequest) {
		return myTransactionProcessor.transaction(theRequestDetails, theRequest, false);
	}

	@Override
	@Transactional(propagation = Propagation.MANDATORY)
	public T transactionNested(RequestDetails theRequestDetails, T theRequest) {
		return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
	}


	@Nullable
	@Override
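Note: the new transactionNested variant runs with Propagation.MANDATORY (it must be called inside an already-open transaction), while transaction uses Propagation.NEVER. A sketch of the intended calling pattern; the wrapping service, field names, and the R4 type parameters are invented for illustration:

	// Hypothetical caller illustrating the two entry points
	@Service
	public class NestedTransactionExample {
		@Autowired
		private IFhirSystemDao<Bundle, Meta> mySystemDao;

		public Bundle topLevel(RequestDetails theDetails, Bundle theBundle) {
			// Propagation.NEVER: must NOT be invoked with a transaction already active
			return mySystemDao.transaction(theDetails, theBundle);
		}

		@Transactional
		public Bundle nested(RequestDetails theDetails, Bundle theBundle) {
			// Propagation.MANDATORY: requires the caller's transaction to already be active
			return mySystemDao.transactionNested(theDetails, theBundle);
		}
	}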
@@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
@@ -143,14 +144,14 @@ public abstract class BaseTransactionProcessor {
		ourLog.trace("Starting transaction processor");
	}

	public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest) {
	public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) {
		if (theRequestDetails != null && theRequestDetails.getServer() != null && myDao != null) {
			IServerInterceptor.ActionRequestDetails requestDetails = new IServerInterceptor.ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null);
			myDao.notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails);
		}

		String actionName = "Transaction";
		IBaseBundle response = processTransactionAsSubRequest((RequestDetails) theRequestDetails, theRequest, actionName);
		IBaseBundle response = processTransactionAsSubRequest(theRequestDetails, theRequest, actionName, theNestedMode);

		List<IBase> entries = myVersionAdapter.getEntries(response);
		for (int i = 0; i < entries.size(); i++) {
@@ -191,7 +192,7 @@ public abstract class BaseTransactionProcessor {
			myVersionAdapter.setRequestUrl(entry, next.getIdElement().toUnqualifiedVersionless().getValue());
		}

		transaction(theRequestDetails, transactionBundle);
		transaction(theRequestDetails, transactionBundle, false);

		return resp;
	}
@@ -271,10 +272,10 @@ public abstract class BaseTransactionProcessor {
		myDao = theDao;
	}

	private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName) {
	private IBaseBundle processTransactionAsSubRequest(RequestDetails theRequestDetails, IBaseBundle theRequest, String theActionName, boolean theNestedMode) {
		BaseHapiFhirDao.markRequestAsProcessingSubRequest(theRequestDetails);
		try {
			return processTransaction(theRequestDetails, theRequest, theActionName);
			return processTransaction(theRequestDetails, theRequest, theActionName, theNestedMode);
		} finally {
			BaseHapiFhirDao.clearRequestAsProcessingSubRequest(theRequestDetails);
		}
@@ -290,7 +291,7 @@ public abstract class BaseTransactionProcessor {
		myTxManager = theTxManager;
	}

	private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest) {
	private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) {
		ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size());
		long start = System.currentTimeMillis();

@@ -311,7 +312,7 @@ public abstract class BaseTransactionProcessor {
			IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
			myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry);

			IBaseBundle nextResponseBundle = processTransactionAsSubRequest((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request");
			IBaseBundle nextResponseBundle = processTransactionAsSubRequest(theRequestDetails, subRequestBundle, "Batch sub-request", theNestedMode);

			IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
			myVersionAdapter.addEntry(resp, subResponseEntry);
@@ -342,7 +343,7 @@ public abstract class BaseTransactionProcessor {
		}

		long delay = System.currentTimeMillis() - start;
		ourLog.info("Batch completed in {}ms", new Object[]{delay});
		ourLog.info("Batch completed in {}ms", delay);

		return resp;
	}
@@ -352,13 +353,13 @@ public abstract class BaseTransactionProcessor {
		myHapiTransactionService = theHapiTransactionService;
	}

	private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName) {
	private IBaseBundle processTransaction(final RequestDetails theRequestDetails, final IBaseBundle theRequest, final String theActionName, boolean theNestedMode) {
		validateDependencies();

		String transactionType = myVersionAdapter.getBundleType(theRequest);

		if (org.hl7.fhir.r4.model.Bundle.BundleType.BATCH.toCode().equals(transactionType)) {
			return batch(theRequestDetails, theRequest);
			return batch(theRequestDetails, theRequest, theNestedMode);
		}

		if (transactionType == null) {
@@ -466,6 +467,10 @@ public abstract class BaseTransactionProcessor {
		}
		for (IBase nextReqEntry : getEntries) {

			if (theNestedMode) {
				throw new InvalidRequestException("Can not invoke read operation on nested transaction");
			}

			if (!(theRequestDetails instanceof ServletRequestDetails)) {
				throw new MethodNotAllowedException("Can not call transaction GET methods from this context");
			}
@@ -977,7 +982,12 @@ public abstract class BaseTransactionProcessor {
			}
		}

		IPrimitiveType<Date> deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource);
		IPrimitiveType<Date> deletedInstantOrNull;
		if (nextResource instanceof IAnyResource) {
			deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) nextResource);
		} else {
			deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) nextResource);
		}
		Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null;

		IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(nextResource.getClass());
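Note: with the guard added above, a nested transaction containing a read entry is rejected outright. An illustrative R4 bundle that would trip it (model code only; the nested-mode flag is set by transactionNested upstream):

	// Illustrative only: a transaction bundle with a GET entry, which the new
	// guard rejects when processed with theNestedMode == true.
	Bundle nested = new Bundle();
	nested.setType(Bundle.BundleType.TRANSACTION);
	nested.addEntry()
		.getRequest()
		.setMethod(Bundle.HTTPVerb.GET)
		.setUrl("Patient/123");
	// processTransaction(..., theNestedMode = true) -> InvalidRequestException:
	// "Can not invoke read operation on nested transaction"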
@@ -20,559 +20,19 @@ package ca.uhn.fhir.jpa.dao;
 * #L%
 */

import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.base.composite.BaseResourceReferenceDt;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.resource.Bundle.EntryResponse;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.NotModifiedException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletSubRequestDetails;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.util.UrlUtil.UrlParts;
import com.google.common.collect.ArrayListMultimap;
import org.apache.http.NameValuePair;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;

import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoDstu2.class);

	@Autowired
	private PlatformTransactionManager myTxManager;
	@Autowired
	private MatchUrlService myMatchUrlService;
	@Autowired
	private DaoRegistry myDaoRegistry;
	@Autowired
	private MatchResourceUrlService myMatchResourceUrlService;
	@Autowired
	private HapiTransactionService myHapiTransactionalService;

	private Bundle batch(final RequestDetails theRequestDetails, Bundle theRequest) {
		ourLog.info("Beginning batch with {} resources", theRequest.getEntry().size());
		long start = System.currentTimeMillis();

		Bundle resp = new Bundle();
		resp.setType(BundleTypeEnum.BATCH_RESPONSE);

		/*
		 * For batch, we handle each entry as a mini-transaction in its own database transaction so that if one fails, it doesn't prevent others
		 */

		for (final Entry nextRequestEntry : theRequest.getEntry()) {

			TransactionCallback<Bundle> callback = new TransactionCallback<Bundle>() {
				@Override
				public Bundle doInTransaction(TransactionStatus theStatus) {
					Bundle subRequestBundle = new Bundle();
					subRequestBundle.setType(BundleTypeEnum.TRANSACTION);
					subRequestBundle.addEntry(nextRequestEntry);
					return transaction((ServletRequestDetails) theRequestDetails, subRequestBundle, "Batch sub-request");
				}
			};

			BaseServerResponseException caughtEx;
			try {
				Bundle nextResponseBundle;
				if (nextRequestEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) {
					// Don't process GETs in a transaction because they'll
					// create their own
					nextResponseBundle = callback.doInTransaction(null);
				} else {
					nextResponseBundle = myHapiTransactionalService.execute(theRequestDetails, callback);
				}
				caughtEx = null;

				Entry subResponseEntry = nextResponseBundle.getEntry().get(0);
				resp.addEntry(subResponseEntry);
				/*
				 * If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
				 */
				if (subResponseEntry.getResource() == null) {
					subResponseEntry.setResource(nextResponseBundle.getEntry().get(0).getResource());
				}

			} catch (BaseServerResponseException e) {
				caughtEx = e;
			} catch (Throwable t) {
				ourLog.error("Failure during BATCH sub transaction processing", t);
				caughtEx = new InternalErrorException(t);
			}

			if (caughtEx != null) {
				Entry nextEntry = resp.addEntry();

				OperationOutcome oo = new OperationOutcome();
				oo.addIssue().setSeverity(IssueSeverityEnum.ERROR).setDiagnostics(caughtEx.getMessage());
				nextEntry.setResource(oo);

				EntryResponse nextEntryResp = nextEntry.getResponse();
				nextEntryResp.setStatus(toStatusString(caughtEx.getStatusCode()));
			}

		}

		long delay = System.currentTimeMillis() - start;
		ourLog.info("Batch completed in {}ms", new Object[] {delay});

		return resp;
	}

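Note: because each batch entry above runs as its own mini-transaction, one failing entry yields an error OperationOutcome in its response slot while the others still commit. An illustrative DSTU2 sketch; somePatient, requestDetails and systemDao are placeholders:

	// Illustrative: entry 1 fails, entry 2 still commits.
	Bundle batch = new Bundle();
	batch.setType(BundleTypeEnum.BATCH);
	Entry bad = batch.addEntry();
	bad.getRequest().setMethod(HTTPVerbEnum.PUT).setUrl("Patient"); // PUT without an ID or match URL -> error entry
	Entry good = batch.addEntry();
	good.setResource(somePatient);
	good.getRequest().setMethod(HTTPVerbEnum.POST).setUrl("Patient");

	Bundle response = systemDao.transaction(requestDetails, batch);
	// response.getEntry().get(0) carries an OperationOutcome describing the failure;
	// response.getEntry().get(1).getResponse().getStatus() starts with "201"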
	@SuppressWarnings("unchecked")
	private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) {
		BundleTypeEnum transactionType = theRequest.getTypeElement().getValueAsEnum();
		if (transactionType == BundleTypeEnum.BATCH) {
			return batch(theRequestDetails, theRequest);
		}

		return doTransaction(theRequestDetails, theRequest, theActionName, transactionType);
	}

	private Bundle doTransaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, BundleTypeEnum theTransactionType) {
		if (theTransactionType == null) {
			String message = "Transaction Bundle did not specify valid Bundle.type, assuming " + BundleTypeEnum.TRANSACTION.getCode();
			ourLog.warn(message);
			theTransactionType = BundleTypeEnum.TRANSACTION;
		}
		if (theTransactionType != BundleTypeEnum.TRANSACTION) {
			throw new InvalidRequestException("Unable to process transaction where incoming Bundle.type = " + theTransactionType.getCode());
		}

		ourLog.info("Beginning {} with {} resources", theActionName, theRequest.getEntry().size());

		long start = System.currentTimeMillis();
		TransactionDetails transactionDetails = new TransactionDetails();

		Set<IdDt> allIds = new LinkedHashSet<IdDt>();
		Map<IdDt, IdDt> idSubstitutions = new HashMap<IdDt, IdDt>();
		Map<IdDt, DaoMethodOutcome> idToPersistedOutcome = new HashMap<IdDt, DaoMethodOutcome>();

		/*
		 * We want to execute the transaction request bundle elements in the order
		 * specified by the FHIR specification (see TransactionSorter) so we save the
		 * original order in the request, then sort it.
		 *
		 * Entries with a type of GET are removed from the bundle so that they
		 * can be processed at the very end. We do this because the incoming resources
		 * are saved in a two-phase way in order to deal with interdependencies, and
		 * we want the GET processing to use the final indexing state
		 */
		Bundle response = new Bundle();
		List<Entry> getEntries = new ArrayList<Entry>();
		IdentityHashMap<Entry, Integer> originalRequestOrder = new IdentityHashMap<Entry, Integer>();
		for (int i = 0; i < theRequest.getEntry().size(); i++) {
			originalRequestOrder.put(theRequest.getEntry().get(i), i);
			response.addEntry();
			if (theRequest.getEntry().get(i).getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.GET) {
				getEntries.add(theRequest.getEntry().get(i));
			}
		}
		Collections.sort(theRequest.getEntry(), new TransactionSorter());

		List<IIdType> deletedResources = new ArrayList<>();
		DeleteConflictList deleteConflicts = new DeleteConflictList();
		Map<Entry, IBasePersistedResource> entriesToProcess = new IdentityHashMap<>();
		Set<IBasePersistedResource> nonUpdatedEntities = new HashSet<>();
		Set<IBasePersistedResource> updatedEntities = new HashSet<>();

		/*
		 * Handle: GET/PUT/POST
		 */
		TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
		txTemplate.execute(t->{
			handleTransactionWriteOperations(theRequestDetails, theRequest, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, deletedResources, deleteConflicts, entriesToProcess, nonUpdatedEntities, updatedEntities);
			return null;
		});

		/*
		 * Loop through the request and process any entries of type GET
		 */
		for (int i = 0; i < getEntries.size(); i++) {
			Entry nextReqEntry = getEntries.get(i);
			Integer originalOrder = originalRequestOrder.get(nextReqEntry);
			Entry nextRespEntry = response.getEntry().get(originalOrder);

			ServletSubRequestDetails requestDetails = new ServletSubRequestDetails(theRequestDetails);
			requestDetails.setServletRequest(theRequestDetails.getServletRequest());
			requestDetails.setRequestType(RequestTypeEnum.GET);
			requestDetails.setServer(theRequestDetails.getServer());

			String url = extractTransactionUrlOrThrowException(nextReqEntry, HTTPVerbEnum.GET);

			int qIndex = url.indexOf('?');
			ArrayListMultimap<String, String> paramValues = ArrayListMultimap.create();
			requestDetails.setParameters(new HashMap<String, String[]>());
			if (qIndex != -1) {
				String params = url.substring(qIndex);
				List<NameValuePair> parameters = UrlUtil.translateMatchUrl(params);
				for (NameValuePair next : parameters) {
					paramValues.put(next.getName(), next.getValue());
				}
				for (Map.Entry<String, Collection<String>> nextParamEntry : paramValues.asMap().entrySet()) {
					String[] nextValue = nextParamEntry.getValue().toArray(new String[nextParamEntry.getValue().size()]);
					requestDetails.addParameter(nextParamEntry.getKey(), nextValue);
				}
				url = url.substring(0, qIndex);
			}

			requestDetails.setRequestPath(url);
			requestDetails.setFhirServerBase(theRequestDetails.getFhirServerBase());

			theRequestDetails.getServer().populateRequestDetailsFromRequestPath(requestDetails, url);
			BaseMethodBinding<?> method = theRequestDetails.getServer().determineResourceMethod(requestDetails, url);
			if (method == null) {
				throw new IllegalArgumentException("Unable to handle GET " + url);
			}

			if (isNotBlank(nextReqEntry.getRequest().getIfMatch())) {
				requestDetails.addHeader(Constants.HEADER_IF_MATCH, nextReqEntry.getRequest().getIfMatch());
			}
			if (isNotBlank(nextReqEntry.getRequest().getIfNoneExist())) {
				requestDetails.addHeader(Constants.HEADER_IF_NONE_EXIST, nextReqEntry.getRequest().getIfNoneExist());
			}
			if (isNotBlank(nextReqEntry.getRequest().getIfNoneMatch())) {
				requestDetails.addHeader(Constants.HEADER_IF_NONE_MATCH, nextReqEntry.getRequest().getIfNoneMatch());
			}

			if (method instanceof BaseResourceReturningMethodBinding) {
				try {
					IBaseResource resource = ((BaseResourceReturningMethodBinding) method).doInvokeServer(theRequestDetails.getServer(), requestDetails);
					if (paramValues.containsKey(Constants.PARAM_SUMMARY) || paramValues.containsKey(Constants.PARAM_CONTENT)) {
						resource = filterNestedBundle(requestDetails, resource);
					}
					nextRespEntry.setResource((IResource) resource);
					nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK));
				} catch (NotModifiedException e) {
					nextRespEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_304_NOT_MODIFIED));
				}
			} else {
				throw new IllegalArgumentException("Unable to handle GET " + url);
			}

		}

		for (Map.Entry<Entry, IBasePersistedResource> nextEntry : entriesToProcess.entrySet()) {
			nextEntry.getKey().getResponse().setLocation(nextEntry.getValue().getIdDt().toUnqualified().getValue());
			nextEntry.getKey().getResponse().setEtag(nextEntry.getValue().getIdDt().getVersionIdPart());
		}

		long delay = System.currentTimeMillis() - start;
		int numEntries = theRequest.getEntry().size();
		long delayPer = delay / numEntries;
		ourLog.info("{} completed in {}ms ({} entries at {}ms per entry)", theActionName, delay, numEntries, delayPer);

		response.setType(BundleTypeEnum.TRANSACTION_RESPONSE);
		return response;
	}

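Note: the GET-entry loop above splits each entry URL at '?' and copies the query parameters into the sub-request. A self-contained plain-JDK illustration of that split (UrlUtil.translateMatchUrl does the real parsing; this version is only to make the mechanics concrete):

	String url = "Patient?name=smith&_summary=true";
	int qIndex = url.indexOf('?');
	Map<String, List<String>> params = new HashMap<>();
	if (qIndex != -1) {
		for (String pair : url.substring(qIndex + 1).split("&")) {
			String[] kv = pair.split("=", 2);
			params.computeIfAbsent(kv[0], k -> new ArrayList<>()).add(kv.length > 1 ? kv[1] : "");
		}
		url = url.substring(0, qIndex); // "Patient"
	}
	// params now holds {name=[smith], _summary=[true]} and url is the request path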
	private void handleTransactionWriteOperations(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IdDt> theAllIds, Map<IdDt, IdDt> theIdSubstitutions, Map<IdDt, DaoMethodOutcome> theIdToPersistedOutcome, Bundle theResponse, IdentityHashMap<Entry, Integer> theOriginalRequestOrder, List<IIdType> theDeletedResources, DeleteConflictList theDeleteConflicts, Map<Entry, IBasePersistedResource> theEntriesToProcess, Set<IBasePersistedResource> theNonUpdatedEntities, Set<IBasePersistedResource> theUpdatedEntities) {
		/*
		 * Loop through the request and process any entries of type
		 * PUT, POST or DELETE
		 */
		for (int i = 0; i < theRequest.getEntry().size(); i++) {

			if (i % 100 == 0) {
				ourLog.debug("Processed {} non-GET entries out of {}", i, theRequest.getEntry().size());
			}

			Entry nextReqEntry = theRequest.getEntry().get(i);
			IResource res = nextReqEntry.getResource();
			IdDt nextResourceId = null;
			if (res != null) {

				nextResourceId = res.getId();

				if (!nextResourceId.hasIdPart()) {
					if (isNotBlank(nextReqEntry.getFullUrl())) {
						nextResourceId = new IdDt(nextReqEntry.getFullUrl());
					}
				}

				if (nextResourceId.hasIdPart() && nextResourceId.getIdPart().matches("[a-zA-Z]+:.*") && !isPlaceholder(nextResourceId)) {
					throw new InvalidRequestException("Invalid placeholder ID found: " + nextResourceId.getIdPart() + " - Must be of the form 'urn:uuid:[uuid]' or 'urn:oid:[oid]'");
				}

				if (nextResourceId.hasIdPart() && !nextResourceId.hasResourceType() && !isPlaceholder(nextResourceId)) {
					nextResourceId = new IdDt(toResourceName(res.getClass()), nextResourceId.getIdPart());
					res.setId(nextResourceId);
				}

				/*
				 * Ensure that the bundle doesn't have any duplicates, since this causes all kinds of weirdness
				 */
				if (isPlaceholder(nextResourceId)) {
					if (!theAllIds.add(nextResourceId)) {
						throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextResourceId));
					}
				} else if (nextResourceId.hasResourceType() && nextResourceId.hasIdPart()) {
					IdDt nextId = nextResourceId.toUnqualifiedVersionless();
					if (!theAllIds.add(nextId)) {
						throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionContainsMultipleWithDuplicateId", nextId));
					}
				}

			}

			HTTPVerbEnum verb = nextReqEntry.getRequest().getMethodElement().getValueAsEnum();
			if (verb == null) {
				throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionEntryHasInvalidVerb", nextReqEntry.getRequest().getMethod()));
			}

			String resourceType = res != null ? getContext().getResourceType(res) : null;
			Entry nextRespEntry = theResponse.getEntry().get(theOriginalRequestOrder.get(nextReqEntry));

			switch (verb) {
				case POST: {
					// CREATE
					@SuppressWarnings("rawtypes")
					IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());
					res.setId((String) null);
					DaoMethodOutcome outcome;
					outcome = resourceDao.create(res, nextReqEntry.getRequest().getIfNoneExist(), false, theTransactionDetails, theRequestDetails);
					handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res);
					theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
					if (outcome.getCreated() == false) {
						theNonUpdatedEntities.add(outcome.getEntity());
					}
					break;
				}
				case DELETE: {
					// DELETE
					String url = extractTransactionUrlOrThrowException(nextReqEntry, verb);
					UrlParts parts = UrlUtil.parseUrl(url);
					IFhirResourceDao<? extends IBaseResource> dao = toDao(parts, verb.getCode(), url);
					int status = Constants.STATUS_HTTP_204_NO_CONTENT;
					if (parts.getResourceId() != null) {
						DaoMethodOutcome outcome = dao.delete(new IdDt(parts.getResourceType(), parts.getResourceId()), theDeleteConflicts, theRequestDetails, theTransactionDetails);
						if (outcome.getEntity() != null) {
							theDeletedResources.add(outcome.getId().toUnqualifiedVersionless());
							theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
						}
					} else {
						DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(parts.getResourceType() + '?' + parts.getParams(), theDeleteConflicts, theRequestDetails);
						List<ResourceTable> allDeleted = deleteOutcome.getDeletedEntities();
						for (ResourceTable deleted : allDeleted) {
							theDeletedResources.add(deleted.getIdDt().toUnqualifiedVersionless());
						}
						if (allDeleted.isEmpty()) {
							status = Constants.STATUS_HTTP_404_NOT_FOUND;
						}
					}

					nextRespEntry.getResponse().setStatus(toStatusString(status));
					break;
				}
				case PUT: {
					// UPDATE
					@SuppressWarnings("rawtypes")
					IFhirResourceDao resourceDao = myDaoRegistry.getResourceDao(res.getClass());

					DaoMethodOutcome outcome;

					String url = extractTransactionUrlOrThrowException(nextReqEntry, verb);

					UrlParts parts = UrlUtil.parseUrl(url);
					if (isNotBlank(parts.getResourceId())) {
						res.setId(new IdDt(parts.getResourceType(), parts.getResourceId()));
						outcome = resourceDao.update(res, null, false, theRequestDetails);
					} else {
						res.setId((String) null);
						outcome = resourceDao.update(res, parts.getResourceType() + '?' + parts.getParams(), false, theRequestDetails);
					}

					if (outcome.getCreated() == Boolean.FALSE) {
						theUpdatedEntities.add(outcome.getEntity());
					}

					handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res);
					theEntriesToProcess.put(nextRespEntry, outcome.getEntity());
					break;
				}
				case GET:
					break;
			}
		}

		/*
		 * Make sure that there are no conflicts from deletions. E.g. we can't delete something
		 * if something else has a reference to it.. Unless the thing that has a reference to it
		 * was also deleted as a part of this transaction, which is why we check this now at the
		 * end.
		 */

		theDeleteConflicts.removeIf(next -> theDeletedResources.contains(next.getTargetId().toVersionless()));
		DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(getContext(), theDeleteConflicts);

		/*
		 * Perform ID substitutions and then index each resource we have saved
		 */

		FhirTerser terser = getContext().newTerser();
		for (DaoMethodOutcome nextOutcome : theIdToPersistedOutcome.values()) {
			IResource nextResource = (IResource) nextOutcome.getResource();
			if (nextResource == null) {
				continue;
			}

			// References
			List<BaseResourceReferenceDt> allRefs = terser.getAllPopulatedChildElementsOfType(nextResource, BaseResourceReferenceDt.class);
			for (BaseResourceReferenceDt nextRef : allRefs) {
				IdDt nextId = nextRef.getReference();
				if (!nextId.hasIdPart()) {
					continue;
				}
				if (theIdSubstitutions.containsKey(nextId)) {
					IdDt newId = theIdSubstitutions.get(nextId);
					ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId);
					nextRef.setReference(newId);
				} else {
					ourLog.debug(" * Reference [{}] does not exist in bundle", nextId);
				}
			}

			// URIs
			List<UriDt> allUris = terser.getAllPopulatedChildElementsOfType(nextResource, UriDt.class);
			for (UriDt nextRef : allUris) {
				if (nextRef instanceof IIdType) {
					continue; // No substitution on the resource ID itself!
				}
				IdDt nextUriString = new IdDt(nextRef.getValueAsString());
				if (theIdSubstitutions.containsKey(nextUriString)) {
					IdDt newId = theIdSubstitutions.get(nextUriString);
					ourLog.debug(" * Replacing resource ref {} with {}", nextUriString, newId);
					nextRef.setValue(newId.getValue());
				} else {
					ourLog.debug(" * Reference [{}] does not exist in bundle", nextUriString);
				}
			}

			InstantDt deletedInstantOrNull = ResourceMetadataKeyEnum.DELETED_AT.get(nextResource);
			Date deletedTimestampOrNull = deletedInstantOrNull != null ? deletedInstantOrNull.getValue() : null;
			if (theUpdatedEntities.contains(nextOutcome.getEntity())) {
				updateInternal(theRequestDetails, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
			} else if (!theNonUpdatedEntities.contains(nextOutcome.getEntity())) {
				updateEntity(theRequestDetails, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true);
			}
		}

		myEntityManager.flush();

		/*
		 * Double check we didn't allow any duplicates we shouldn't have
		 */
		for (Entry nextEntry : theRequest.getEntry()) {
			if (nextEntry.getRequest().getMethodElement().getValueAsEnum() == HTTPVerbEnum.POST) {
				String matchUrl = nextEntry.getRequest().getIfNoneExist();
				if (isNotBlank(matchUrl)) {
					Class<? extends IBaseResource> resType = nextEntry.getResource().getClass();
					Set<ResourcePersistentId> val = myMatchResourceUrlService.processMatchUrl(matchUrl, resType, theRequestDetails);
					if (val.size() > 1) {
						throw new InvalidRequestException(
							"Unable to process " + theActionName + " - Request would cause multiple resources to match URL: \"" + matchUrl + "\". Does transaction request contain duplicates?");
					}
				}
			}
		}

		for (IdDt next : theAllIds) {
			IdDt replacement = theIdSubstitutions.get(next);
			if (replacement == null) {
				continue;
			}
			if (replacement.equals(next)) {
				continue;
			}
			ourLog.debug("Placeholder resource ID \"{}\" was replaced with permanent ID \"{}\"", next, replacement);
		}
	}

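Note: the substitution pass above rewrites urn:uuid placeholders to the permanent server-assigned IDs once the write phase has run. An illustrative DSTU2 fragment showing the before/after relationship:

	// Illustrative: a POSTed Patient carries a urn:uuid placeholder that a sibling
	// Observation references; after the write phase, the substitution pass rewrites
	// Observation.subject from the placeholder to the assigned Patient/<id>.
	Patient p = new Patient();
	p.setId(new IdDt("urn:uuid:0c34a1b4-0000-0000-0000-000000000000"));

	Observation o = new Observation();
	o.getSubject().setReference("urn:uuid:0c34a1b4-0000-0000-0000-000000000000");

	// After processing: idSubstitutions maps the urn:uuid to e.g. Patient/42,
	// and the stored Observation's subject reference points at Patient/42.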
	private String extractTransactionUrlOrThrowException(Entry nextEntry, HTTPVerbEnum verb) {
		String url = nextEntry.getRequest().getUrl();
		if (isBlank(url)) {
			throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionMissingUrl", verb.name()));
		}
		return url;
	}

	/**
	 * This method is called for nested bundles (e.g. if we received a transaction with an entry that
	 * was a GET search, this method is called on the bundle for the search result, that will be placed in the
	 * outer bundle). This method applies the _summary and _content parameters to the output of
	 * that bundle.
	 * <p>
	 * TODO: This isn't the most efficient way of doing this.. hopefully we can come up with something better in the future.
	 */
	private IBaseResource filterNestedBundle(RequestDetails theRequestDetails, IBaseResource theResource) {
		IParser p = getContext().newJsonParser();
		RestfulServerUtils.configureResponseParser(theRequestDetails, p);
		return p.parseResource(theResource.getClass(), p.encodeResourceToString(theResource));
	}

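Note: filterNestedBundle applies _summary/_content by round-tripping the bundle through a parser configured from the request. An equivalent standalone sketch using the parser's summary mode directly (searchResultBundle is a placeholder):

	// Standalone sketch of the same idea: re-encode with a parser in summary mode,
	// then parse back, which strips non-summary elements from the nested bundle.
	FhirContext ctx = FhirContext.forDstu2();
	IParser parser = ctx.newJsonParser().setSummaryMode(true);
	Bundle filtered = parser.parseResource(Bundle.class, parser.encodeResourceToString(searchResultBundle));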
	@Override
	public MetaDt metaGetOperation(RequestDetails theRequestDetails) {
@@ -589,31 +49,6 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
		return retVal;
	}

	private IFhirResourceDao<? extends IBaseResource> toDao(UrlParts theParts, String theVerb, String theUrl) {
		RuntimeResourceDefinition resType;
		try {
			resType = getContext().getResourceDefinition(theParts.getResourceType());
		} catch (DataFormatException e) {
			String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
			throw new InvalidRequestException(msg);
		}
		IFhirResourceDao<? extends IBaseResource> dao = null;
		if (resType != null) {
			dao = this.myDaoRegistry.getResourceDaoOrNull(resType.getImplementingClass());
		}
		if (dao == null) {
			String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
			throw new InvalidRequestException(msg);
		}

		// if (theParts.getResourceId() == null && theParts.getParams() == null) {
		// String msg = getContext().getLocalizer().getMessage(BaseHapiFhirSystemDao.class, "transactionInvalidUrl", theVerb, theUrl);
		// throw new InvalidRequestException(msg);
		// }

		return dao;
	}

	protected MetaDt toMetaDt(Collection<TagDefinition> tagDefinitions) {
		MetaDt retVal = new MetaDt();
		for (TagDefinition next : tagDefinitions) {
@@ -632,105 +67,9 @@ public class FhirSystemDaoDstu2 extends BaseHapiFhirSystemDao<Bundle, MetaDt> {
		return retVal;
	}

	@Transactional(propagation = Propagation.NEVER)
	@Override
	public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
		if (theRequestDetails != null) {
			ActionRequestDetails requestDetails = new ActionRequestDetails(theRequestDetails, theRequest, "Bundle", null);
			notifyInterceptors(RestOperationTypeEnum.TRANSACTION, requestDetails);
		}

		String actionName = "Transaction";
		return transaction((ServletRequestDetails) theRequestDetails, theRequest, actionName);
	}

	private Bundle transaction(ServletRequestDetails theRequestDetails, Bundle theRequest, String theActionName) {
		markRequestAsProcessingSubRequest(theRequestDetails);
		try {
			return doTransaction(theRequestDetails, theRequest, theActionName);
		} finally {
			clearRequestAsProcessingSubRequest(theRequestDetails);
		}
	}

	private static void handleTransactionCreateOrUpdateOutcome(Map<IdDt, IdDt> idSubstitutions, Map<IdDt, DaoMethodOutcome> idToPersistedOutcome, IdDt nextResourceId, DaoMethodOutcome outcome,
																				  Entry newEntry, String theResourceType, IResource theRes) {
		IdDt newId = (IdDt) outcome.getId().toUnqualifiedVersionless();
		IdDt resourceId = isPlaceholder(nextResourceId) ? nextResourceId : nextResourceId.toUnqualifiedVersionless();
		if (newId.equals(resourceId) == false) {
			idSubstitutions.put(resourceId, newId);
			if (isPlaceholder(resourceId)) {
				/*
				 * The correct way for substitution IDs to be is to be with no resource type, but we'll accept the qualified kind too just to be lenient.
				 */
				idSubstitutions.put(new IdDt(theResourceType + '/' + resourceId.getValue()), newId);
			}
		}
		idToPersistedOutcome.put(newId, outcome);
		if (outcome.getCreated().booleanValue()) {
			newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_201_CREATED));
		} else {
			newEntry.getResponse().setStatus(toStatusString(Constants.STATUS_HTTP_200_OK));
		}
		newEntry.getResponse().setLastModified(ResourceMetadataKeyEnum.UPDATED.get(theRes));
	}

	private static boolean isPlaceholder(IdDt theId) {
		if (theId.getValue() != null) {
			return theId.getValue().startsWith("urn:oid:") || theId.getValue().startsWith("urn:uuid:");
		}
		return false;
	}

	private static String toStatusString(int theStatusCode) {
		return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
	}

	@Override
	public IBaseBundle processMessage(RequestDetails theRequestDetails, IBaseBundle theMessage) {
		return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
	}


	/**
	 * Transaction Order, per the spec:
	 * <p>
	 * Process any DELETE interactions
	 * Process any POST interactions
	 * Process any PUT interactions
	 * Process any GET interactions
	 */
	public class TransactionSorter implements Comparator<Entry> {

		@Override
		public int compare(Entry theO1, Entry theO2) {
			int o1 = toOrder(theO1);
			int o2 = toOrder(theO2);

			return o1 - o2;
		}

		private int toOrder(Entry theO1) {
			int o1 = 0;
			if (theO1.getRequest().getMethodElement().getValueAsEnum() != null) {
				switch (theO1.getRequest().getMethodElement().getValueAsEnum()) {
					case DELETE:
						o1 = 1;
						break;
					case POST:
						o1 = 2;
						break;
					case PUT:
						o1 = 3;
						break;
					case GET:
						o1 = 4;
						break;
				}
			}
			return o1;
		}

	}

}
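Note: TransactionSorter orders entries DELETE, then POST, then PUT, then GET, per the spec comment above; entries with no method (toOrder() returning 0) sort ahead of everything. A tiny self-contained mirror of that ordering on plain strings:

	// Mirror of TransactionSorter.toOrder() for illustration:
	List<String> verbs = new ArrayList<>(Arrays.asList("GET", "PUT", "DELETE", "POST"));
	verbs.sort(Comparator.comparingInt((String v) ->
		"DELETE".equals(v) ? 1 : "POST".equals(v) ? 2 : "PUT".equals(v) ? 3 : "GET".equals(v) ? 4 : 0));
	// verbs is now [DELETE, POST, PUT, GET] - the FHIR-mandated processing order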
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/TransactionProcessorVersionAdapterDstu2.java (new file, 171 lines)
@@ -0,0 +1,171 @@
package ca.uhn.fhir.jpa.dao;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome;
import ca.uhn.fhir.model.dstu2.valueset.BundleTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.HTTPVerbEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueSeverityEnum;
import ca.uhn.fhir.model.dstu2.valueset.IssueTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.util.Date;
import java.util.List;

public class TransactionProcessorVersionAdapterDstu2 implements TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> {
	@Override
	public void setResponseStatus(Bundle.Entry theBundleEntry, String theStatus) {
		theBundleEntry.getResponse().setStatus(theStatus);
	}

	@Override
	public void setResponseLastModified(Bundle.Entry theBundleEntry, Date theLastModified) {
		theBundleEntry.getResponse().setLastModified(theLastModified, TemporalPrecisionEnum.MILLI);
	}

	@Override
	public void setResource(Bundle.Entry theBundleEntry, IBaseResource theResource) {
		theBundleEntry.setResource((IResource) theResource);
	}

	@Override
	public IBaseResource getResource(Bundle.Entry theBundleEntry) {
		return theBundleEntry.getResource();
	}

	@Override
	public String getBundleType(Bundle theRequest) {
		if (theRequest.getType() == null) {
			return null;
		}
		return theRequest.getTypeElement().getValue();
	}

	@Override
	public void populateEntryWithOperationOutcome(BaseServerResponseException theCaughtEx, Bundle.Entry theEntry) {
		OperationOutcome oo = new OperationOutcome();
		oo.addIssue()
			.setSeverity(IssueSeverityEnum.ERROR)
			.setDiagnostics(theCaughtEx.getMessage())
			.setCode(IssueTypeEnum.EXCEPTION);
		theEntry.setResource(oo);
	}

	@Override
	public Bundle createBundle(String theBundleType) {
		Bundle resp = new Bundle();
		try {
			resp.setType(BundleTypeEnum.forCode(theBundleType));
		} catch (FHIRException theE) {
			throw new InternalErrorException("Unknown bundle type: " + theBundleType);
		}
		return resp;
	}

	@Override
	public List<Bundle.Entry> getEntries(Bundle theRequest) {
		return theRequest.getEntry();
	}

	@Override
	public void addEntry(Bundle theBundle, Bundle.Entry theEntry) {
		theBundle.addEntry(theEntry);
	}

	@Override
	public Bundle.Entry addEntry(Bundle theBundle) {
		return theBundle.addEntry();
	}

	@Override
	public String getEntryRequestVerb(FhirContext theContext, Bundle.Entry theEntry) {
		String retVal = null;
		HTTPVerbEnum value = theEntry.getRequest().getMethodElement().getValueAsEnum();
		if (value != null) {
			retVal = value.getCode();
		}
		return retVal;
	}

	@Override
	public String getFullUrl(Bundle.Entry theEntry) {
		return theEntry.getFullUrl();
	}

	@Override
	public String getEntryIfNoneExist(Bundle.Entry theEntry) {
		return theEntry.getRequest().getIfNoneExist();
	}

	@Override
	public String getEntryRequestUrl(Bundle.Entry theEntry) {
		return theEntry.getRequest().getUrl();
	}

	@Override
	public void setResponseLocation(Bundle.Entry theEntry, String theResponseLocation) {
		theEntry.getResponse().setLocation(theResponseLocation);
	}

	@Override
	public void setResponseETag(Bundle.Entry theEntry, String theEtag) {
		theEntry.getResponse().setEtag(theEtag);
	}

	@Override
	public String getEntryRequestIfMatch(Bundle.Entry theEntry) {
		return theEntry.getRequest().getIfMatch();
	}

	@Override
	public String getEntryRequestIfNoneExist(Bundle.Entry theEntry) {
		return theEntry.getRequest().getIfNoneExist();
	}

	@Override
	public String getEntryRequestIfNoneMatch(Bundle.Entry theEntry) {
		return theEntry.getRequest().getIfNoneMatch();
	}

	@Override
	public void setResponseOutcome(Bundle.Entry theEntry, IBaseOperationOutcome theOperationOutcome) {
		theEntry.setResource((IResource) theOperationOutcome);
	}

	@Override
	public void setRequestVerb(Bundle.Entry theEntry, String theVerb) {
		theEntry.getRequest().setMethod(HTTPVerbEnum.forCode(theVerb));
	}

	@Override
	public void setRequestUrl(Bundle.Entry theEntry, String theUrl) {
		theEntry.getRequest().setUrl(theUrl);
	}

}
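Note: this adapter is what lets the version-independent BaseTransactionProcessor manipulate DSTU2 bundles through one interface. A short usage sketch built only from methods in the file above (myFhirContext is a placeholder):

	// Illustrative: version-independent code driving the DSTU2 adapter.
	TransactionProcessor.ITransactionProcessorVersionAdapter<Bundle, Bundle.Entry> adapter =
		new TransactionProcessorVersionAdapterDstu2();

	Bundle request = adapter.createBundle("transaction");
	Bundle.Entry entry = adapter.addEntry(request);
	adapter.setRequestVerb(entry, "POST");
	adapter.setRequestUrl(entry, "Patient");
	String verb = adapter.getEntryRequestVerb(myFhirContext, entry); // "POST"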
@ -1,6 +1,6 @@
package ca.uhn.fhir.jpa.dao.data;

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
@ -38,13 +38,13 @@ public interface IBulkExportJobDao extends JpaRepository<BulkExportJobEntity, Lo
	Optional<BulkExportJobEntity> findByJobId(@Param("jobid") String theUuid);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myStatus = :status")
-	Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkJobStatusEnum theSubmitted);
+	Slice<BulkExportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkExportJobStatusEnum theSubmitted);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myExpiry < :cutoff")
	Slice<BulkExportJobEntity> findByExpiry(Pageable thePage, @Param("cutoff") Date theCutoff);

	@Query("SELECT j FROM BulkExportJobEntity j WHERE j.myRequest = :request AND j.myCreated > :createdAfter AND j.myStatus <> :status ORDER BY j.myCreated DESC")
-	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkJobStatusEnum theNotStatus);
+	Slice<BulkExportJobEntity> findExistingJob(Pageable thePage, @Param("request") String theRequest, @Param("createdAfter") Date theCreatedAfter, @Param("status") BulkExportJobStatusEnum theNotStatus);

	@Modifying
	@Query("DELETE FROM BulkExportJobEntity t")
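A sketch of how the renamed status enum is used when polling this repository; PageRequest.of, the logger, and the SUBMITTED constant (suggested by the parameter name theSubmitted above) are assumptions, not shown in this hunk:

	// Hypothetical poller built on the query methods above.
	Slice<BulkExportJobEntity> submitted =
		myBulkExportJobDao.findByStatus(PageRequest.of(0, 1), BulkExportJobStatusEnum.SUBMITTED);
	submitted.forEach(job -> ourLog.info("Picking up bulk export job {}", job.getJobId()));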
40 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobDao.java Normal file
@ -0,0 +1,40 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.Optional;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public interface IBulkImportJobDao extends JpaRepository<BulkImportJobEntity, Long> {

	@Query("SELECT j FROM BulkImportJobEntity j WHERE j.myJobId = :jobid")
	Optional<BulkImportJobEntity> findByJobId(@Param("jobid") String theUuid);

	@Query("SELECT j FROM BulkImportJobEntity j WHERE j.myStatus = :status")
	Slice<BulkImportJobEntity> findByStatus(Pageable thePage, @Param("status") BulkImportJobStatusEnum theStatus);
}
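Both finders return standard Spring Data types, so callers decide how to handle absence; a sketch (the exception choice and DAO field name are illustrative):

	// Hypothetical lookup using the Optional-returning finder above.
	BulkImportJobEntity fetchJobOrThrow(String theJobId) {
		return myBulkImportJobDao.findByJobId(theJobId)
			.orElseThrow(() -> new IllegalArgumentException("Unknown bulk import job: " + theJobId));
	}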
43 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBulkImportJobFileDao.java Normal file
@ -0,0 +1,43 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;
import java.util.Optional;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public interface IBulkImportJobFileDao extends JpaRepository<BulkImportJobFileEntity, Long> {

	@Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
	List<BulkImportJobFileEntity> findAllForJob(@Param("jobId") String theJobId);

	@Query("SELECT f FROM BulkImportJobFileEntity f WHERE f.myJob = :job AND f.myFileSequence = :fileIndex")
	Optional<BulkImportJobFileEntity> findForJob(@Param("job") BulkImportJobEntity theJob, @Param("fileIndex") int theFileIndex);

	@Query("SELECT f.myId FROM BulkImportJobFileEntity f WHERE f.myJob.myJobId = :jobId ORDER BY f.myFileSequence ASC")
	List<Long> findAllIdsForJob(@Param("jobId") String theJobId);

}
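findAllIdsForJob lets a reader walk a job's files without materializing every JOB_CONTENTS lob at once; a sketch of that one-file-at-a-time pattern (the process(String) callback and DAO field name are placeholders):

	// Hypothetical walk; each BulkImportJobFileEntity is loaded individually,
	// so only one lob is held in memory at a time.
	void processFilesForJob(String theJobId) {
		for (Long fileId : myBulkImportJobFileDao.findAllIdsForJob(theJobId)) {
			myBulkImportJobFileDao.findById(fileId).ifPresent(file -> process(file.getContents()));
		}
	}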
@ -22,18 +22,13 @@ package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Meta;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
@ -42,14 +37,10 @@ import java.util.List;

public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {

	@Autowired
	private TransactionProcessor myTransactionProcessor;

	@Override
	@PostConstruct
	public void start() {
		super.start();
		myTransactionProcessor.setDao(this);
	}

	@Override
@ -88,12 +79,5 @@ public class FhirSystemDaoDstu3 extends BaseHapiFhirSystemDao<Bundle, Meta> {
		return FhirResourceDaoMessageHeaderDstu2.throwProcessMessageNotImplemented();
	}

	@Transactional(propagation = Propagation.NEVER)
	@Override
	public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
		return myTransactionProcessor.transaction(theRequestDetails, theRequest);
	}

}
@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobFileEntity;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
@ -123,6 +125,8 @@ public class ExpungeEverythingService {
		counter.addAndGet(expungeEverythingByType(NpmPackageVersionEntity.class));
		counter.addAndGet(expungeEverythingByType(NpmPackageEntity.class));
		counter.addAndGet(expungeEverythingByType(SearchParamPresent.class));
+		counter.addAndGet(expungeEverythingByType(BulkImportJobFileEntity.class));
+		counter.addAndGet(expungeEverythingByType(BulkImportJobEntity.class));
		counter.addAndGet(expungeEverythingByType(ForcedId.class));
		counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamDate.class));
		counter.addAndGet(expungeEverythingByType(ResourceIndexedSearchParamNumber.class));
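Note the ordering of the two added lines: BulkImportJobFileEntity rows are purged before their parent BulkImportJobEntity rows, which the FK_BLKIMJOBFILE_JOB foreign key declared on the file entity (later in this diff) requires.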
@ -22,42 +22,20 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Meta;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
import java.util.Collection;
import java.util.List;

public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> {

	@Autowired
	private TransactionProcessor myTransactionProcessor;

	@VisibleForTesting
	public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
		myTransactionProcessor = theTransactionProcessor;
	}

	@Override
	@PostConstruct
	public void start() {
		super.start();
		myTransactionProcessor.setDao(this);
	}

	@Override
	public Meta metaGetOperation(RequestDetails theRequestDetails) {
		// Notify interceptors
@ -95,10 +73,4 @@ public class FhirSystemDaoR4 extends BaseHapiFhirSystemDao<Bundle, Meta> {
		return retVal;
	}

	@Transactional(propagation = Propagation.NEVER)
	@Override
	public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
		return myTransactionProcessor.transaction(theRequestDetails, theRequest);
	}

}
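As in the DSTU3 hunk above and the R5 hunk below, the per-version TransactionProcessor wiring and transaction() override shrink away here, with transaction handling consolidated behind the shared BaseHapiFhirSystemDao. Call sites are unchanged for consumers; a sketch, assuming the inherited method keeps this signature:

	// Hypothetical caller of the R4 system DAO shown above.
	Bundle runTransaction(FhirSystemDaoR4 theSystemDao, RequestDetails theRequestDetails) {
		Bundle input = new Bundle();
		input.setType(Bundle.BundleType.TRANSACTION);
		// ... add request entries here ...
		return theSystemDao.transaction(theRequestDetails, input);
	}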
@ -22,20 +22,14 @@ package ca.uhn.fhir.jpa.dao.r5;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FhirResourceDaoMessageHeaderDstu2;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r5.model.Meta;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import javax.persistence.TypedQuery;
import java.util.Collection;
import java.util.List;
@ -44,17 +38,6 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao<Bundle, Meta> {

	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirSystemDaoR5.class);

	@Autowired
	private TransactionProcessor myTransactionProcessor;

	@Override
	@PostConstruct
	public void start() {
		super.start();
		myTransactionProcessor.setDao(this);
	}

	@Override
	public Meta metaGetOperation(RequestDetails theRequestDetails) {
		// Notify interceptors
@ -92,10 +75,5 @@ public class FhirSystemDaoR5 extends BaseHapiFhirSystemDao<Bundle, Meta> {
		return retVal;
	}

	@Transactional(propagation = Propagation.NEVER)
	@Override
	public Bundle transaction(RequestDetails theRequestDetails, Bundle theRequest) {
		return myTransactionProcessor.transaction(theRequestDetails, theRequest);
	}

}
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType;
@ -51,9 +51,9 @@ import static org.apache.commons.lang3.StringUtils.left;

@Entity
@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
	@UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
}, indexes = {
	@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
})
public class BulkExportJobEntity implements Serializable {

@ -70,7 +70,7 @@ public class BulkExportJobEntity implements Serializable {

	@Enumerated(EnumType.STRING)
	@Column(name = "JOB_STATUS", length = 10, nullable = false)
-	private BulkJobStatusEnum myStatus;
+	private BulkExportJobStatusEnum myStatus;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "CREATED_TIME", nullable = false)
	private Date myCreated;
@ -156,11 +156,11 @@ public class BulkExportJobEntity implements Serializable {
		return b.toString();
	}

-	public BulkJobStatusEnum getStatus() {
+	public BulkExportJobStatusEnum getStatus() {
		return myStatus;
	}

-	public void setStatus(BulkJobStatusEnum theStatus) {
+	public void setStatus(BulkExportJobStatusEnum theStatus) {
		if (myStatus != theStatus) {
			myStatusTime = new Date();
			myStatus = theStatus;
157 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java Normal file
@ -0,0 +1,157 @@
package ca.uhn.fhir.jpa.entity;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import java.io.Serializable;
import java.util.Date;

import static org.apache.commons.lang3.StringUtils.left;

@Entity
@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = {
	@UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")
})
public class BulkImportJobEntity implements Serializable {

	@Id
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID")
	@SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID")
	@Column(name = "PID")
	private Long myId;

	@Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false)
	private String myJobId;
	@Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
	private String myJobDescription;
	@Enumerated(EnumType.STRING)
	@Column(name = "JOB_STATUS", length = 10, nullable = false)
	private BulkImportJobStatusEnum myStatus;
	@Version
	@Column(name = "OPTLOCK", nullable = false)
	private int myVersion;
	@Column(name = "FILE_COUNT", nullable = false)
	private int myFileCount;
	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "STATUS_TIME", nullable = false)
	private Date myStatusTime;
	@Column(name = "STATUS_MESSAGE", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
	private String myStatusMessage;
	@Column(name = "ROW_PROCESSING_MODE", length = 20, nullable = false, updatable = false)
	@Enumerated(EnumType.STRING)
	private JobFileRowProcessingModeEnum myRowProcessingMode;
	@Column(name = "BATCH_SIZE", nullable = false, updatable = false)
	private int myBatchSize;

	public String getJobDescription() {
		return myJobDescription;
	}

	public void setJobDescription(String theJobDescription) {
		myJobDescription = left(theJobDescription, BulkExportJobEntity.STATUS_MESSAGE_LEN);
	}

	public JobFileRowProcessingModeEnum getRowProcessingMode() {
		return myRowProcessingMode;
	}

	public void setRowProcessingMode(JobFileRowProcessingModeEnum theRowProcessingMode) {
		myRowProcessingMode = theRowProcessingMode;
	}

	public Date getStatusTime() {
		return myStatusTime;
	}

	public void setStatusTime(Date theStatusTime) {
		myStatusTime = theStatusTime;
	}

	public int getFileCount() {
		return myFileCount;
	}

	public void setFileCount(int theFileCount) {
		myFileCount = theFileCount;
	}

	public String getJobId() {
		return myJobId;
	}

	public void setJobId(String theJobId) {
		myJobId = theJobId;
	}

	public BulkImportJobStatusEnum getStatus() {
		return myStatus;
	}

	/**
	 * Sets the status, updates the status time, and clears the status message
	 */
	public void setStatus(BulkImportJobStatusEnum theStatus) {
		if (myStatus != theStatus) {
			myStatus = theStatus;
			setStatusTime(new Date());
			setStatusMessage(null);
		}
	}

	public String getStatusMessage() {
		return myStatusMessage;
	}

	public void setStatusMessage(String theStatusMessage) {
		myStatusMessage = left(theStatusMessage, BulkExportJobEntity.STATUS_MESSAGE_LEN);
	}

	public BulkImportJobJson toJson() {
		return new BulkImportJobJson()
			.setProcessingMode(getRowProcessingMode())
			.setFileCount(getFileCount())
			.setJobDescription(getJobDescription());
	}

	public int getBatchSize() {
		return myBatchSize;
	}

	public void setBatchSize(int theBatchSize) {
		myBatchSize = theBatchSize;
	}
}
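A sketch of staging a job through this entity's setters; the enum constants and the surrounding DAO are assumptions, only the setters and the nullable = false columns above are from the diff:

	// Hypothetical staging helper. Every nullable = false column must be populated before save.
	BulkImportJobEntity stageJob(int theFileCount) {
		BulkImportJobEntity job = new BulkImportJobEntity();
		job.setJobId(UUID.randomUUID().toString());
		job.setFileCount(theFileCount);
		job.setBatchSize(100);
		job.setRowProcessingMode(JobFileRowProcessingModeEnum.FHIR_TRANSACTION); // constant assumed
		job.setJobDescription("Nightly import");        // setter truncates to STATUS_MESSAGE_LEN
		job.setStatus(BulkImportJobStatusEnum.STAGING); // constant assumed; setter stamps STATUS_TIME
		return myBulkImportJobDao.save(job);
	}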
104 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobFileEntity.java Normal file
@ -0,0 +1,104 @@
package ca.uhn.fhir.jpa.entity;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;

@Entity
@Table(name = "HFJ_BLK_IMPORT_JOBFILE", indexes = {
	@Index(name = "IDX_BLKIM_JOBFILE_JOBID", columnList = "JOB_PID")
})
public class BulkImportJobFileEntity implements Serializable {

	@Id
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOBFILE_PID")
	@SequenceGenerator(name = "SEQ_BLKIMJOBFILE_PID", sequenceName = "SEQ_BLKIMJOBFILE_PID")
	@Column(name = "PID")
	private Long myId;

	@ManyToOne
	@JoinColumn(name = "JOB_PID", referencedColumnName = "PID", nullable = false, foreignKey = @ForeignKey(name = "FK_BLKIMJOBFILE_JOB"))
	private BulkImportJobEntity myJob;

	@Column(name = "FILE_SEQ", nullable = false)
	private int myFileSequence;

	@Lob
	@Column(name = "JOB_CONTENTS", nullable = false)
	private byte[] myContents;

	@Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH)
	private String myTenantName;

	public BulkImportJobEntity getJob() {
		return myJob;
	}

	public void setJob(BulkImportJobEntity theJob) {
		myJob = theJob;
	}

	public int getFileSequence() {
		return myFileSequence;
	}

	public void setFileSequence(int theFileSequence) {
		myFileSequence = theFileSequence;
	}

	public String getContents() {
		return new String(myContents, StandardCharsets.UTF_8);
	}

	public void setContents(String theContents) {
		myContents = theContents.getBytes(StandardCharsets.UTF_8);
	}

	public BulkImportJobFileJson toJson() {
		return new BulkImportJobFileJson()
			.setContents(getContents())
			.setTenantName(getTenantName());
	}

	public void setTenantName(String theTenantName) {
		myTenantName = theTenantName;
	}

	public String getTenantName() {
		return myTenantName;
	}
}
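Contents are stored as a UTF-8 byte[] behind String accessors, so callers never deal with the lob encoding directly; a construction sketch (the helper name and wiring are illustrative):

	// Hypothetical helper pairing a file with its parent job.
	BulkImportJobFileEntity newFile(BulkImportJobEntity theJob, int theSequence, String theNdjsonChunk) {
		BulkImportJobFileEntity file = new BulkImportJobFileEntity();
		file.setJob(theJob);               // satisfies FK_BLKIMJOBFILE_JOB
		file.setFileSequence(theSequence); // drives the ORDER BY in IBulkImportJobFileDao
		file.setContents(theNdjsonChunk);  // stored as UTF-8 bytes in the JOB_CONTENTS lob
		return file;
	}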
@ -65,7 +65,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
@ -655,16 +654,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
	}

	private void deleteAndExpungeResourceBinary(IIdType theResourceBinaryId, ExpungeOptions theOptions) {
-		if (myPartitionSettings.isPartitioningEnabled()) {
-			SystemRequestDetails requestDetails = new SystemRequestDetails();
-			requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME);
-			getBinaryDao().delete(theResourceBinaryId, requestDetails).getEntity();
-			getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, requestDetails);
-		} else {
-			getBinaryDao().delete(theResourceBinaryId).getEntity();
-			getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, null);
-		}
+		getBinaryDao().delete(theResourceBinaryId, new SystemRequestDetails()).getEntity();
+		getBinaryDao().forceExpungeInExistingTransaction(theResourceBinaryId, theOptions, new SystemRequestDetails());
	}

@ -347,7 +347,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
	private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
		if (myPartitionSettings.isPartitioningEnabled()) {
			SystemRequestDetails requestDetails = new SystemRequestDetails();
-			requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME);
+			// requestDetails.setTenantId(JpaConstants.DEFAULT_PARTITION_NAME);
			return theDao.search(theMap, requestDetails);
		} else {
			return theDao.search(theMap);
@ -404,9 +404,15 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
	}

	private boolean isStructureDefinitionWithoutSnapshot(IBaseResource r) {
+		boolean retVal = false;
		FhirTerser terser = myFhirContext.newTerser();
-		return r.getClass().getSimpleName().equals("StructureDefinition") &&
-			terser.getSingleValueOrNull(r, "snapshot") == null;
+		if (r.getClass().getSimpleName().equals("StructureDefinition")) {
+			Optional<String> kind = terser.getSinglePrimitiveValue(r, "kind");
+			if (kind.isPresent() && !(kind.get().equals("logical"))) {
+				retVal = terser.getSingleValueOrNull(r, "snapshot") == null;
+			}
+		}
+		return retVal;
	}

	private IBaseResource generateSnapshot(IBaseResource sd) {
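With this change, a StructureDefinition whose kind is "logical" no longer counts as "missing a snapshot", so package installation skips snapshot generation for logical models. The decision table, illustrated with R4 model classes (assumed here for concreteness; the predicate itself is version-independent via FhirTerser):

	// Hypothetical inputs to the new predicate.
	StructureDefinition logical = new StructureDefinition();
	logical.setKind(StructureDefinition.StructureDefinitionKind.LOGICAL);
	// -> false: installed as-is, generateSnapshot(...) is not called

	StructureDefinition profile = new StructureDefinition();
	profile.setKind(StructureDefinition.StructureDefinitionKind.RESOURCE); // no snapshot element populated
	// -> true: a snapshot will be generated before install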
@ -35,6 +35,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.Nonnull;
@ -44,11 +45,15 @@ import java.util.HashSet;
import java.util.List;
import java.util.Objects;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME;
import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooks;
import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.doCallHooksAndReturnObject;
import static ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster.hasHooks;
import static org.slf4j.LoggerFactory.getLogger;

public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
	private static final Logger ourLog = getLogger(RequestPartitionHelperSvc.class);

	private final HashSet<Object> myNonPartitionableResourceNames;

@ -95,14 +100,18 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
	public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType) {
		RequestPartitionId requestPartitionId;

+		boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType);
		if (myPartitionSettings.isPartitioningEnabled()) {
			// Handle system requests
-			if ((theRequest == null && myNonPartitionableResourceNames.contains(theResourceType))) {
+			//TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through SystemRequestDetails instead.
+			if (theRequest == null && nonPartitionableResource) {
				return RequestPartitionId.defaultPartition();
			}

-			// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
-			if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
+			if (theRequest instanceof SystemRequestDetails) {
+				requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource);
+				// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
+			} else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
				HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest);
@ -119,6 +128,47 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
		return RequestPartitionId.allPartitions();
	}

	/**
	 *
	 * For system requests, read partition from tenant ID if present, otherwise set to DEFAULT. If the resource they are attempting to partition
	 * is non-partitionable scream in the logs and set the partition to DEFAULT.
	 *
	 * @param theRequest
	 * @param theNonPartitionableResource
	 * @return
	 */
	private RequestPartitionId getSystemRequestPartitionId(RequestDetails theRequest, boolean theNonPartitionableResource) {
		RequestPartitionId requestPartitionId;
		requestPartitionId = getSystemRequestPartitionId(theRequest);
		if (theNonPartitionableResource && !requestPartitionId.isDefaultPartition()) {
			throw new InternalErrorException("System call is attempting to write a non-partitionable resource to a partition! This is a bug!");
		}
		return requestPartitionId;
	}

	/**
	 * Determine the partition for a System Call (defined by the fact that the request is of type SystemRequestDetails)
	 *
	 * 1. If the tenant ID is set to the constant for all partitions, return all partitions
	 * 2. If there is a tenant ID set in the request, use it.
	 * 3. Otherwise, return the Default Partition.
	 *
	 * @param theRequest The {@link SystemRequestDetails}
	 * @return the {@link RequestPartitionId} to be used for this request.
	 */
	@Nonnull
	private RequestPartitionId getSystemRequestPartitionId(@Nonnull RequestDetails theRequest) {
		if (theRequest.getTenantId() != null) {
			if (theRequest.getTenantId().equals(ALL_PARTITIONS_NAME)) {
				return RequestPartitionId.allPartitions();
			} else {
				return RequestPartitionId.fromPartitionName(theRequest.getTenantId());
			}
		} else {
			return RequestPartitionId.defaultPartition();
		}
	}

	/**
	 * Invoke the {@link Pointcut#STORAGE_PARTITION_IDENTIFY_CREATE} interceptor pointcut to determine the tenant for a create request.
	 */
@ -128,18 +178,22 @@ public class RequestPartitionHelperSvc implements IRequestPartitionHelperSvc {
		RequestPartitionId requestPartitionId;

		if (myPartitionSettings.isPartitioningEnabled()) {
-
-			// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
-			HookParams params = new HookParams()
-				.add(IBaseResource.class, theResource)
-				.add(RequestDetails.class, theRequest)
-				.addIfMatchesType(ServletRequestDetails.class, theRequest);
-			requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
-
			// Handle system requests
			boolean nonPartitionableResource = myNonPartitionableResourceNames.contains(theResourceType);
-			if (nonPartitionableResource && requestPartitionId == null) {
-				requestPartitionId = RequestPartitionId.defaultPartition();
-
+			if (theRequest instanceof SystemRequestDetails) {
+				requestPartitionId = getSystemRequestPartitionId(theRequest, nonPartitionableResource);
+			} else {
+				//This is an external Request (e.g. ServletRequestDetails) so we want to figure out the partition via interceptor.
+				HookParams params = new HookParams()// Interceptor call: STORAGE_PARTITION_IDENTIFY_CREATE
+					.add(IBaseResource.class, theResource)
+					.add(RequestDetails.class, theRequest)
+					.addIfMatchesType(ServletRequestDetails.class, theRequest);
+				requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE, params);
+
+				//If the interceptors haven't selected a partition, and its a non-partitionable resource anyhow, send to DEFAULT
+				if (nonPartitionableResource && requestPartitionId == null) {
+					requestPartitionId = RequestPartitionId.defaultPartition();
+				}
+			}

		String resourceName = myFhirContext.getResourceType(theResource);
|
||||
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ImmutableListMultimap;
|
||||
import com.google.common.collect.ImmutableMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.Multimap;
|
||||
import com.google.common.collect.Multimaps;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME;
|
||||
|
||||
/**
|
||||
* A default RequestDetails implementation that can be used for system calls to
|
||||
@ -104,6 +102,11 @@ public class SystemRequestDetails extends RequestDetails {
|
||||
}
|
||||
myHeaders.put(theName, theValue);
|
||||
}
|
||||
public static SystemRequestDetails newSystemRequestAllPartitions() {
|
||||
SystemRequestDetails systemRequestDetails = new SystemRequestDetails();
|
||||
systemRequestDetails.setTenantId(ALL_PARTITIONS_NAME);
|
||||
return systemRequestDetails;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
|
@ -0,0 +1,58 @@
package ca.uhn.fhir.jpa.bulk;

import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobInstance;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.fail;

public class BaseBatchJobR4Test extends BaseJpaR4Test {

	private static final Logger ourLog = LoggerFactory.getLogger(BaseBatchJobR4Test.class);
	@Autowired
	private JobExplorer myJobExplorer;

	protected List<JobExecution> awaitAllBulkJobCompletions(String... theJobNames) {
		assert theJobNames.length > 0;

		List<JobInstance> bulkExport = new ArrayList<>();
		for (String nextName : theJobNames) {
			bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(nextName, 0, 100));
		}
		if (bulkExport.isEmpty()) {
			List<String> wantNames = Arrays.asList(theJobNames);
			List<String> haveNames = myJobExplorer.getJobNames();
			fail("There are no jobs running - Want names " + wantNames + " and have names " + haveNames);
		}
		List<JobExecution> bulkExportExecutions = bulkExport.stream().flatMap(jobInstance -> myJobExplorer.getJobExecutions(jobInstance).stream()).collect(Collectors.toList());
		awaitJobCompletions(bulkExportExecutions);

		return bulkExportExecutions;
	}

	protected void awaitJobCompletions(Collection<JobExecution> theJobs) {
		theJobs.forEach(jobExecution -> awaitJobCompletion(jobExecution));
	}

	protected void awaitJobCompletion(JobExecution theJobExecution) {
		await().atMost(120, TimeUnit.SECONDS).until(() -> {
			JobExecution jobExecution = myJobExplorer.getJobExecution(theJobExecution.getId());
			ourLog.info("JobExecution {} currently has status: {}- Failures if any: {}", theJobExecution.getId(), jobExecution.getStatus(), jobExecution.getFailureExceptions());
			return jobExecution.getStatus() == BatchStatus.COMPLETED || jobExecution.getStatus() == BatchStatus.FAILED;
		});
	}

}
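A sketch of how a concrete test leans on this base class; the job name and the launch step are illustrative, not taken from this commit:

	// Hypothetical subclass of the helper above.
	public class MyBulkImportJobTest extends BaseBatchJobR4Test {
		@Test
		public void testJobRunsToCompletion() {
			// ... launch the Spring Batch job under test here ...
			List<JobExecution> executions = awaitAllBulkJobCompletions("bulkImportJob"); // name assumed
			assertEquals(BatchStatus.COMPLETED, executions.get(0).getStatus());
		}
	}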
@ -2,11 +2,11 @@ package ca.uhn.fhir.jpa.bulk;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
-import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
-import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.model.BulkExportResponseJson;
-import ca.uhn.fhir.jpa.bulk.model.BulkJobStatusEnum;
-import ca.uhn.fhir.jpa.bulk.provider.BulkDataExportProvider;
+import ca.uhn.fhir.jpa.bulk.export.api.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportResponseJson;
+import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
@ -188,7 +188,7 @@ public class BulkDataExportProviderTest {

	IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
		.setJobId(A_JOB_ID)
-		.setStatus(BulkJobStatusEnum.BUILDING)
+		.setStatus(BulkExportJobStatusEnum.BUILDING)
		.setStatusTime(InstantType.now().getValue());
	when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);

@ -212,7 +212,7 @@ public class BulkDataExportProviderTest {

	IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
		.setJobId(A_JOB_ID)
-		.setStatus(BulkJobStatusEnum.ERROR)
+		.setStatus(BulkExportJobStatusEnum.ERROR)
		.setStatusTime(InstantType.now().getValue())
		.setStatusMessage("Some Error Message");
	when(myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(eq(A_JOB_ID))).thenReturn(jobInfo);
@ -239,7 +239,7 @@ public class BulkDataExportProviderTest {

	IBulkDataExportSvc.JobInfo jobInfo = new IBulkDataExportSvc.JobInfo()
		.setJobId(A_JOB_ID)
-		.setStatus(BulkJobStatusEnum.COMPLETE)
+		.setStatus(BulkExportJobStatusEnum.COMPLETE)
		.setStatusTime(InstantType.now().getValue());
	jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/111"));
	jobInfo.addFile().setResourceType("Patient").setResourceId(new IdType("Binary/222"));
Some files were not shown because too many files have changed in this diff.