merge master

commit 94f4009d73
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -1,5 +1,25 @@
 package ca.uhn.fhir.util;
 
+/*-
+ * #%L
+ * HAPI FHIR - Core Library
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ListMultimap;
@@ -31,7 +31,7 @@ public interface IAnyResource extends IBaseResource {
 	/**
 	 * Search parameter constant for <b>_id</b>
 	 */
-	@SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="token" )
+	@SearchParamDefinition(name="_id", path="", description="The ID of the resource", type="token")
 	String SP_RES_ID = "_id";
 
 	/**
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -54,6 +54,31 @@ public final class BatchConstants {
 	 * MDM Clear
 	 */
 	public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
+
+	/**
+	 * TermCodeSystem delete
+	 */
+	public static final String TERM_CODE_SYSTEM_DELETE_JOB_NAME = "termCodeSystemDeleteJob";
+	public static final String TERM_CONCEPT_RELATIONS_DELETE_STEP_NAME = "termConceptRelationsDeleteStep";
+	public static final String TERM_CONCEPTS_DELETE_STEP_NAME = "termConceptsDeleteStep";
+	public static final String TERM_CODE_SYSTEM_VERSION_DELETE_STEP_NAME = "termCodeSystemVersionDeleteStep";
+	public static final String TERM_CODE_SYSTEM_DELETE_STEP_NAME = "termCodeSystemDeleteStep";
+	public static final String JOB_PARAM_CODE_SYSTEM_ID = "termCodeSystemPid";
+
+	/**
+	 * TermCodeSystemVersion delete
+	 */
+	public static final String TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME = "termCodeSystemVersionDeleteJob";
+	public static final String TERM_CONCEPT_RELATIONS_UNIQUE_VERSION_DELETE_STEP_NAME = "termConceptRelationsUniqueVersionDeleteStep";
+	public static final String TERM_CONCEPTS_UNIQUE_VERSION_DELETE_STEP_NAME = "termConceptsUniqueVersionDeleteStep";
+	public static final String TERM_CODE_SYSTEM_UNIQUE_VERSION_DELETE_STEP_NAME = "termCodeSystemUniqueVersionDeleteStep";
+
+	/**
+	 * Both: TermCodeSystem delete and TermCodeSystemVersion delete
+	 */
+	public static final String JOB_PARAM_CODE_SYSTEM_VERSION_ID = "termCodeSystemVersionPid";
+
+
 	public static final String BULK_EXPORT_READ_CHUNK_PARAMETER = "readChunkSize";
 	public static final String BULK_EXPORT_GROUP_ID_PARAMETER = "groupId";
 	/**
@@ -32,6 +32,8 @@ import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteExcep
 import org.springframework.batch.core.repository.JobRepository;
 import org.springframework.batch.core.repository.JobRestartException;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
 
 import static org.slf4j.LoggerFactory.getLogger;
 
@@ -46,6 +48,7 @@ public class BatchJobSubmitterImpl implements IBatchJobSubmitter {
 	private JobRepository myJobRepository;
 
 	@Override
+	@Transactional(propagation = Propagation.NOT_SUPPORTED)
 	public JobExecution runJob(Job theJob, JobParameters theJobParameters) throws JobParametersInvalidException {
 		try {
 			return myJobLauncher.run(theJob, theJobParameters);
@@ -10,7 +10,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.7.0-PRE2-SNAPSHOT</version>
+		<version>5.7.0-PRE3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -0,0 +1,4 @@
---
type: add
issue: 3131
title: "Provided a Remote Terminology Service implementation for the $lookup Operation."
@@ -0,0 +1,6 @@
---
type: fix
jira: SMILE-3152
issue: 3138
title: "Previously, the package registry would not work correctly when externalized binary storage was enabled. This has been corrected."
@@ -0,0 +1,3 @@
---
release-date: "2021-11-18"
codename: "Raccoon"
@@ -0,0 +1,5 @@
---
type: fix
issue: 3108
title: "Code System deletion background tasks were taking over a day to complete on very large CodeSystems for PostgreSQL, SQL Server and Oracle
  databases. This has been improved and now takes less than an hour on all three platforms."
@@ -0,0 +1,5 @@
---
type: fix
issue: 3145
title: "RequestValidatingInterceptor incorrectly prevented GraphQL requests from being submitted using
  the HTTP POST form of the GraphQL operation."
@@ -0,0 +1,6 @@
---
type: fix
issue: 3153
jira: SMILE-3289
title: "Updated UnknownCodeSystemWarningValidationSupport to allow warnings to be thrown when
  configured to do so."
@@ -0,0 +1,5 @@
---
type: fix
issue: 3158
title: "Resource links were previously not being consistently created in cases where references were versioned and
  pointing to recently auto-created placeholder resources."
@@ -0,0 +1,4 @@
---
type: fix
issue: 3170
title: "Fixed language code validation so that it is case-insensitive (e.g., en-US, en-us, EN-US, EN-us should all work)."
@@ -73,6 +73,7 @@ page.server_jpa_mdm.mdm_expansion=MDM Search Expansion
 
 section.server_jpa_cql.title=JPA Server: CQL
 page.server_jpa_cql.cql=CQL Getting Started
+page.server_jpa_cql.cql_measure=CQL Measure
 
 section.server_jpa_partitioning.title=JPA Server: Partitioning and Multitenancy
 page.server_jpa_partitioning.partitioning=Partitioning and Multitenancy
Binary file not shown.
After Width: | Height: | Size: 50 KiB
@@ -0,0 +1,43 @@
@startuml measure_evaluation_sequence
!include styling.puml
title Measure $evaluate-measure

actor User as User
participant OperationProvider as "HAPI Measure Operation Provider"
participant CQLEngine as "CQL Engine"
participant HAPI as "HAPI CQL Adapters"
participant JPA as "HAPI JPA / Terminology Providers"

User -> OperationProvider: invoke $evaluate-measure
OperationProvider -> JPA: read Measure
JPA --> OperationProvider: return Measure
OperationProvider -> JPA: read Libraries
JPA --> OperationProvider: return Libraries
OperationProvider -> OperationProvider: convert FHIR Libraries to ELM libraries
OperationProvider -> CQLEngine **: create with ELM Libraries

OperationProvider -> JPA: get Subjects
JPA --> OperationProvider: return Subjects
loop each Subject
	OperationProvider -> CQLEngine: set current Subject context
	loop each SDE/Population
		OperationProvider -> CQLEngine: evaluate SDE/Population criteria
		CQLEngine -> CQLEngine: evaluate Definition
		alt terminology required
			CQLEngine -> HAPI: retrieve terminology
			HAPI -> JPA: request terminology
			JPA --> HAPI: return terminology
			HAPI --> CQLEngine: return terminology
		end
		CQLEngine -> HAPI: retrieve data
		HAPI -> JPA: request data
		JPA --> HAPI: return data
		HAPI --> CQLEngine: return data
		CQLEngine --> OperationProvider: return SDE/Population criteria result
	end
end
OperationProvider -> CQLEngine !!: destroy
OperationProvider -> OperationProvider: build MeasureReport
OperationProvider -> OperationProvider: score MeasureReport
OperationProvider -> User: return MeasureReport
@enduml
@@ -0,0 +1,528 @@
[New SVG file (528 lines of markup, not reproduced here): the "Measure / CQL Components" architecture diagram. It shows a Docker Container running a Tomcat Server, Spring Web App Context, and BaseServlet, exposed via a REST API; Operation Providers ($evaluate-measure), the CQL Evaluator, CQL Translator, and CQL Engine; the CQL interface implementations (LibraryContentProvider, HAPI ELM Cache, HAPI FhirDal, JPA FhirRetrieveProvider, JPA TerminologyProvider); the HAPI data access layer (FHIR Resource DAOs, System DAO for transactions, Terminology Service, JPA/Hibernate); and H2 (resource and cache tables) plus Lucene (terminology index) storage.]
After Width: | Height: | Size: 43 KiB
@@ -0,0 +1,126 @@
@startuml styling

'These are Alphora colors, they need
' to be updated to reflect the HAPI style guide
!$white = "#fff"

!$greylt000 = "#F4F6F5"
!$greylt100 = "#DDE3E0"
!$greylt200 = "#C7D1CC"
!$greylt300 = "#B0BFB8"
!$grey = $greylt000

!$greydk000 = "#404F47"
!$greydk100 = "#2E3833"
!$greydk200 = "#1C221F"
!$greydk250 = "#121614"
!$greydk300 = "#090B0A"
!$black = $greydk300

!$blue000 = "#4B7DD2"
!$blue100 = "#3064BF"
!$blue200 = "#2956A3"
!$blue300 = "#214583"
!$blue = $blue100

!$purple000 = "#645FAB"
!$purple100 = "#524D93"
!$purple200 = "#433F78"
!$purple300 = "#34315E"
!$purple = $purple100

!$green000 = "#568A67"
!$green100 = "#477154"
!$green200 = "#375841"
!$green300 = "#273F2F"
!$green = $green100

!$yellow000 = "#FBB337"
!$yellow100 = "#FAA40F"
!$yellow200 = "#DC8D04"
!$yellow300 = "#B47304"
!$yellow = $yellow100

!$red000 = "#FF6633"
!$red100 = "#FF4000"
!$red200 = "#E03800"
!$red300 = "#B82E00"
!$red = $red100

skinparam {
	defaultFontName Source Sans Pro

	TitleFontStyle bold

	BackgroundColor $white
	Shadowing false

	ArrowColor $greydk000
	ArrowFontColor $black
	ArrowFontSize 12
	ArrowFont Open Sans

	DelayFontColor $black
	DelayFontSize 12

	ActorBorderColor $black
	ActorBackgroundColor $white
	ActorFontColor $black
	ActorFontSize 14
	ActorFontStyle bold

	ParticipantBorderColor $black
	ParticipantBackgroundColor $yellow
	ParticipantFontColor $black
	ParticipantFontSize 14
	ParticipantFontStyle bold

	DatabaseBorderColor $black
	DatabaseBackgroundColor $yellow
	DatabaseFontColor $black
	DatabaseFontSize 14
	DatabaseFontStyle bold
}


skinparam Sequence {
	MessageAlign center

	LifeLineBorderColor $black
	' loop, alt, ref
	GroupBodyBackgroundColor $white
	GroupBackgroundColor $blue
	GroupHeaderFontColor $white
	GroupHeaderFontSize 12
	GroupFontSize 12

	BoxBackgroundColor $greylt000
	BoxBorderColor $black
	BoxFontColor $black
	BoxFontSize 12


	ReferenceBorderColor $black
	ReferenceFontColor $black
	ReferenceFontSize 12
	ReferenceHeaderBackgroundColor $blue

	DividerBackgroundColor $blue
	DividerBorderColor $black
	DividerFontColor $white
	DividerFontSize 12
}

skinparam Note {
	BackgroundColor $green
	BorderColor $black
	FontColor $white
	FontStyle bold
	FontSize 12
	Font Open Sans
}

hide footbox

@enduml
@@ -25,14 +25,19 @@ There are two Spring beans available that add CQL processing to HAPI. You can en
 * `ca.uhn.fhir.cql.config.CqlDstu3Config`
 * `ca.uhn.fhir.cql.config.CqlR4Config`
 
-## Operations
+## Clinical Reasoning Operations
 
-HAPI provides implementations for some Measure operations for DSTU3 and R4
+HAPI provides implementations for some operations in DSTU3 and R4:
 
-### $evaluate-measure
+[CQL Measure](cql_measure.html)
 
-The [$evaluate-measure](http://hl7.org/fhir/measure-operation-evaluate-measure.html) operation allows the evaluation of a clinical quality measure. This operation is invoked on an instance of a Measure resource:
+## Roadmap
 
-`http://base/Measure/measureId/$evaluate-measure?subject=124&periodStart=2014-01&periodend=2014-03`
+Further development of the CQL capabilities in HAPI is planned:
 
-The Measure will be evaluated, including any CQL that is referenced. The CQL evaluation requires that all the supporting knowledge artifacts for a given Measure be loaded on the HAPI server, including `Libaries` and `ValueSets`.
+* Additional features and performance enhancements for Measure evaluation
+* Additional FHIR Clinical Reasoning Module operations:
+  * Library $evaluate
+  * PlanDefinition $apply
+* Support for the CPG IG Operations
+  * $cql
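The `CqlDstu3Config` and `CqlR4Config` classes referenced in this hunk are ordinary Spring configuration classes. As a minimal sketch (assuming a standard annotation-based Spring setup; the surrounding `AppConfig` class is illustrative and not part of this commit), enabling R4 CQL processing amounts to importing the config into the application context:

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;

import ca.uhn.fhir.cql.config.CqlR4Config;

// Hypothetical application configuration class. Importing CqlR4Config is what
// registers the CQL providers described above with the Spring web app context.
@Configuration
@Import(CqlR4Config.class)
public class AppConfig {
}
```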
@ -0,0 +1,406 @@
|
|||
# CQL Measure
|
||||
|
||||
## Introduction
|
||||
|
||||
The FHIR Clinical Reasoning Module defines the [Measure resource](https://www.hl7.org/fhir/measure.html) and several [associated operations](https://www.hl7.org/fhir/measure-operations.html). The Measure Resource represents a structured, computable definition of a health-related measure such as a clinical quality measure, public health indicator, or population analytics measure. These Measures can then be used for reporting, analytics, and data-exchange purposes.
|
||||
|
||||
Electronic Clinical Quality Measures (eCQMs) in FHIR are represented as a FHIR Measure resource containing metadata and terminology, a population criteria section, and at least one FHIR Library resource containing a data criteria section as well as the logic used to define the population criteria. The population criteria section typically contains initial population criteria, denominator criteria, and numerator criteria sub-components, among others. This is elaborated upon in greater detail in the [CQF Measures IG](http://hl7.org/fhir/us/cqfmeasures). An example of an eCQM as defined in FHIR looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType" : "Measure",
|
||||
"library" : [
|
||||
"http://hl7.org/fhir/us/cqfmeasures/Library/EXMLogic"
|
||||
],
|
||||
"group" : [
|
||||
{
|
||||
"population" : [
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "initial-population"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql.identifier",
|
||||
"expression" : "Initial Population"
|
||||
}
|
||||
},
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "numerator"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql.identifier",
|
||||
"expression" : "Numerator"
|
||||
}
|
||||
},
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "denominator"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql.identifier",
|
||||
"expression" : "Denominator"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Measures are then scored according to whether a subject (or set of subjects) is a member of the various populations.
|
||||
|
||||
For example, a Measure for Breast Cancer screening might define an Initial Population (via CQL expressions) of "all women", a Denominator of "women over 35", and a Numerator of "women over 35 who have had breast cancer screenings in the past year". If the Measure is evaluated against a population of 100 women, of whom 50 are over 35 and 25 of those have had breast cancer screenings in the past year, the final score would be 50%<sup>1</sup> (total number in the numerator / total number in the denominator).
|
||||
|
||||
1. There are several methods for scoring Measures, this is meant only as an example.
|
||||
|
||||
## Operations
|
||||
|
||||
HAPI implements the [$evaluate-measure](https://www.hl7.org/fhir/operation-measure-evaluate-measure.html) operation. Support for additional operations is planned.
|
||||
|
||||
## Evaluate Measure
|
||||
|
||||
The `$evaluate-measure` operation is used to execute a Measure as specified by the relevant FHIR Resources against a subject or set of subjects. This implementation currently focuses primarily on supporting the narrower evaluation requirements defined by the [CQF Measures IG](http://hl7.org/fhir/us/cqfmeasures). Some support for extensions defined by other IGs is included as well, and the implementation aims to support a wider range of functionality in the future.
|
||||
|
||||
### Example Measure
|
||||
|
||||
Several example Measures are available in the [ecqm-content-r4](https://github.com/cqframework/ecqm-content-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/ecqm-content-r4/tree/master/bundles/measure). You can download a Bundle and load it on your server as a transaction:
|
||||
|
||||
```bash
|
||||
POST http://your-server-base/fhir BreastCancerScreeningFHIR-bundle.json
|
||||
```
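
The same transaction can also be submitted programmatically with the HAPI FHIR Java client. This is a minimal sketch, not the only way to load content: it assumes an R4 server at `http://your-server-base/fhir` and that the bundle JSON linked above has been downloaded to the working directory.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

import java.io.FileReader;

public class LoadMeasureBundle {
    public static void main(String[] args) throws Exception {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

        // Parse the downloaded measure bundle (Measure, Libraries, ValueSets, test Patients, ...)
        Bundle bundle = ctx.newJsonParser()
            .parseResource(Bundle.class, new FileReader("BreastCancerScreeningFHIR-bundle.json"));

        // Post the whole bundle as a single FHIR transaction
        Bundle response = client.transaction().withBundle(bundle).execute();
        System.out.println("Loaded " + response.getEntry().size() + " entries");
    }
}
```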
|
||||
|
||||
These Bundles also include example Patient clinical data, so once posted, Measure evaluation can be invoked with:
|
||||
|
||||
```bash
|
||||
GET http://your-server-base/fhir/Measure/BreastCancerScreeningFHIR/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31&subject=numerator&reportType=subject
|
||||
```
|
||||
|
||||
### Measure Features
|
||||
|
||||
The FHIR Measure specification defines several different types of Measures and various parameters for controlling the Measure evaluation. This section describes the features supported by HAPI.
|
||||
|
||||
#### Reporting Period
|
||||
|
||||
The `periodStart` and `periodEnd` parameters are used to control the Reporting Period for which a report is generated. This corresponds to the `Measurement Period` defined in the CQL logic, as defined by the conformance requirements in the CQF Measures IG. Either both `periodStart` and `periodEnd` must be provided, or neither.
|
||||
|
||||
If neither is provided, the default reporting period specified in the CQL logic is used, as shown here:
|
||||
|
||||
```cql
|
||||
parameter "Measurement Period" Interval<DateTime>
|
||||
default Interval[@2019-01-01T00:00:00.0, @2020-01-01T00:00:00.0)
|
||||
```
|
||||
|
||||
If neither is provided and there is no default reporting period in the CQL logic, an error is thrown.
|
||||
|
||||
A request using `periodStart` and `periodEnd` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31
|
||||
```
|
||||
|
||||
`periodStart` and `periodEnd` support Dates (YYYY, YYYY-MM, or YYYY-MM-DD) and DateTimes (YYYY-MM-DDThh:mm:ss+zz:zz).
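
The same request can be made from Java with the HAPI FHIR generic client. This is a hedged sketch: the server base URL and Measure ID are the ones used in the examples above, and `returnResourceType` is used because `$evaluate-measure` returns a MeasureReport rather than a Parameters resource.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.MeasureReport;
import org.hl7.fhir.r4.model.Parameters;

public class EvaluateMeasureForPeriod {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

        // periodStart / periodEnd accept Date or DateTime values, as described above
        Parameters inParams = new Parameters();
        inParams.addParameter().setName("periodStart").setValue(new DateType("2019-01-01"));
        inParams.addParameter().setName("periodEnd").setValue(new DateType("2019-12-31"));

        MeasureReport report = client.operation()
            .onInstance(new IdType("Measure", "BreastCancerScreeningFHIR"))
            .named("$evaluate-measure")
            .withParameters(inParams)
            .returnResourceType(MeasureReport.class)
            .execute();

        // The MeasureReport.period echoes the reporting period that was evaluated
        System.out.println(report.getPeriod().getStartElement().getValueAsString()
            + " - " + report.getPeriod().getEndElement().getValueAsString());
    }
}
```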
|
||||
|
||||
#### Report Types
|
||||
|
||||
Measure report types determine what data is returned from the evaluation. This is controlled with the `reportType` parameter on the `$evaluate-measure` operation.
|
||||
|
||||
| Report Type | Supported | Description |
|
||||
| ------------ | :----------------: | -------------------------------------------------------------------------------------------------------------- |
|
||||
| subject | :white_check_mark: | Measure report for a single subject (e.g. one patient). Includes additional detail, such as evaluatedResources |
|
||||
| subject-list | :white_check_mark: | Measure report including the list of subjects in each population (e.g. all the patients in the "numerator") |
|
||||
| population | :white_check_mark: | Summary measure report for a population |
|
||||
|
||||
NOTE: There's an open issue on the FHIR specification to align these names to the MeasureReportType value set.
|
||||
|
||||
A request using `reportType` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?reportType=subject-list
|
||||
```
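
As a sketch of how a `subject-list` report might be requested and consumed from Java (same assumed server base and Measure ID as the earlier examples), each population in the returned MeasureReport carries a reference to a List resource of subjects:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.MeasureReport;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

public class SubjectListReport {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

        Parameters inParams = new Parameters();
        inParams.addParameter().setName("reportType").setValue(new StringType("subject-list"));

        MeasureReport report = client.operation()
            .onInstance(new IdType("Measure", "BreastCancerScreeningFHIR"))
            .named("$evaluate-measure")
            .withParameters(inParams)
            .returnResourceType(MeasureReport.class)
            .execute();

        // Each population (initial-population, numerator, denominator, ...) references a List of subjects
        report.getGroupFirstRep().getPopulation().forEach(population ->
            System.out.println(population.getCode().getCodingFirstRep().getCode()
                + " -> " + population.getSubjectResults().getReference()));
    }
}
```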
|
||||
|
||||
#### Subject Types
|
||||
|
||||
The subject of a measure evaluation is controlled with the `subject` (R4+) and `patient` (DSTU3) operation parameters. Currently the only subject type supported by HAPI is Patient. This means that all Measure evaluation and reporting happens with respect to a Patient or set of Patient resources.
|
||||
|
||||
| Subject Type | Supported | Description |
|
||||
| ----------------- | :------------------: | ----------------- |
|
||||
| Patient | :white_check_mark: | A Patient |
|
||||
| Practitioner | :white_large_square: | A Practitioner |
|
||||
| Organization | :white_large_square: | An Organization |
|
||||
| Location | :white_large_square: | A Location |
|
||||
| Device | :white_large_square: | A Device |
|
||||
| Group<sup>1</sup> | :white_large_square: | A set of subjects |
|
||||
|
||||
1. See next section
|
||||
|
||||
A request using `subject` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?subject=Patient/123
|
||||
```
|
||||
|
||||
##### Selecting a set of Patients
|
||||
|
||||
The set of Patients used for Measure evaluation is controlled with the `subject` (R4+) or `patient` (DSTU3), and `practitioner` parameters. The two parameters are mutually exclusive.
|
||||
|
||||
| Parameter | Supported | Description |
|
||||
| ----------------------------------------------------- | :------------------: | ----------------------------------------------------------------------- |
|
||||
| Not specified | :white_check_mark: | All Patients on the server |
|
||||
| `subject=XXX` or `subject=Patient/XXX` | :white_check_mark: | A single Patient |
|
||||
| `practitioner=XXX` or `practitioner=Practitioner/XXX` | :white_check_mark: | All Patients whose `generalPractitioner` is the referenced Practitioner |
|
||||
| `subject=Group/XXX`<sup>1</sup> | :white_large_square: | A Group containing subjects |
|
||||
| `subject=XXX` AND `practitioner=XXX` | :x: | Not a valid combination |
|
||||
|
||||
1. Referencing a Group of Patients as the subject is defined in the ATR IG and is on the roadmap. This will allow much more control over which Patients are included in the evaluated set.
|
||||
|
||||
A request using `practitioner` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?practitioner=Practitioner/XYZ
|
||||
```
|
||||
|
||||
#### ReportType, Subject, Practitioner Matrix
|
||||
|
||||
The following table shows the combinations of the `subject` (or `patient`), `practitioner`, and `reportType` parameters that are valid:
|
||||
|
||||
| | subject reportType | subject-list reportType | population reportType |
|
||||
| ---------------- | :----------------: | :-------------------------------: | :-------------------------------: |
|
||||
| subject parameter | :white_check_mark: | :white_check_mark: <sup>1,2</sup> | :white_check_mark: <sup>1,2</sup> |
|
||||
| practitioner parameter | :x:<sup>3</sup> | :white_check_mark: | :white_check_mark: |
|
||||
|
||||
1. Including the subject parameter restricts the Measure evaluation to a single Patient. Omit the `subject` (or `patient`) parameter to get a report for multiple Patients. The subject-list and population report types have less detail than a subject report.
|
||||
2. A Group `subject` with a subject-list or population `reportType` will be a valid combination once Group support is implemented.
|
||||
3. A practitioner may have zero, one, or many patients, so a practitioner report always assumes a set.
|
||||
|
||||
#### Scoring Methods
|
||||
|
||||
The Measure scoring method determines how a Measure score is calculated. It is set with the [scoring](https://www.hl7.org/fhir/measure-definitions.html#Measure.scoring) element on the Measure resource.
|
||||
|
||||
The HAPI implementation conforms to the requirements defined by the CQF Measures IG. A more detailed description of each scoring method is linked in the table below.
|
||||
|
||||
| Scoring Method | Supported | Description |
|
||||
| ------------------- | :------------------: | ---------------------------------------------------------------------------------------------------------------------- |
|
||||
| proportion | :white_check_mark: | [Proportion Measures](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#proportion-measures) |
|
||||
| ratio | :white_check_mark: | [Ratio Measures](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#ratio-measures) |
|
||||
| continuous-variable | :white_check_mark: | [Continuous Variable](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#continuous-variable-measure) |
|
||||
| cohort | :white_check_mark:* | [Cohort](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#cohort-definitions) |
|
||||
| composite | :white_large_square: | See below |
|
||||
|
||||
* Cohort Measure scoring support is partial. The HAPI implementation does not yet return the required Measure observations.
|
||||
|
||||
An example Measure resource with `scoring` defined looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"scoring": {
|
||||
"coding": [ {
|
||||
"system": "http://terminology.hl7.org/CodeSystem/measure-scoring",
|
||||
"code": "proportion",
|
||||
"display": "Proportion"
|
||||
} ]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
##### Composite Scoring
|
||||
|
||||
A composite Measure is scored by combining and/or aggregating the results of other Measures. The [compositeScoring](https://www.hl7.org/fhir/measure-definitions.html#Measure.compositeScoring) element is used to control how composite Measures are scored. HAPI does not currently support any composite scoring method.
|
||||
|
||||
| Composite Scoring Method | Supported | Description |
|
||||
| ------------------------ | :------------------: | ---------------------------------------------------------------------------------------------- |
|
||||
| opportunity | :white_large_square: | Combines Numerators and Denominators for each component Measure |
|
||||
| all-or-nothing | :white_large_square: | Includes individuals that are in the numerator for all component Measures |
|
||||
| linear | :white_large_square: | Gives an individual score based on the number of numerators in which they appear |
|
||||
| weighted                 | :white_large_square: | Gives an individual a score based on a weighted factor for each numerator in which they appear  |
|
||||
|
||||
#### Populations
|
||||
|
||||
The HAPI implementation uses the populations defined by the CQF Measures IG for each scoring type. A matrix of the supported populations is shown in the [Criteria Names](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#criteria-names) section of the CQF Measures IG.
|
||||
|
||||
#### Population Criteria
|
||||
|
||||
The logical criteria used to determine each Measure population are defined by the [Measure.group.population.criteria](https://hl7.org/fhir/R4/measure-definitions.html#Measure.group.population.criteria) element. The Measure specification allows population criteria to be defined using FHIR Path, CQL, or other languages as appropriate. The HAPI implementation currently only supports using CQL. The relationship between a Measure Population and CQL is illustrated in the [Population Criteria](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#population-criteria) section of the CQF Measures IG.
|
||||
|
||||
An example Measure resource with a population criteria referencing a CQL identifier looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"group": [ {
|
||||
"population": [ {
|
||||
"code": {
|
||||
"coding": [ {
|
||||
"system": "http://terminology.hl7.org/CodeSystem/measure-population",
|
||||
"code": "initial-population",
|
||||
"display": "Initial Population"
|
||||
} ]
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql.identifier",
|
||||
"expression": "Initial Population"
|
||||
}
|
||||
}]
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
##### Criteria Expression Type
|
||||
|
||||
| Expression Type | Supported |
|
||||
| --------------- | :------------------: |
|
||||
| CQL | :white_check_mark: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
#### Supplemental Data Elements
|
||||
|
||||
Supplemental Data Elements are used to report additional information about the subjects that may not be included in the Population criteria definitions. For example, it may be of interest to report the gender of all subjects for informational purposes. Supplemental data elements are defined by the [Measure.supplementalData](http://www.hl7.org/fhir/measure-definitions.html#Measure.supplementalData) element, and are reported as Observations in the evaluatedResources of the MeasureReport.
|
||||
|
||||
Supplemental Data Elements can be specified as either CQL definitions or FHIR Path expressions.
|
||||
|
||||
| Expression Type | Supported |
|
||||
| --------------- | :------------------: |
|
||||
| CQL | :white_check_mark: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
An example Measure resource with some supplemental data elements set looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"supplementalData": [ {
|
||||
"code": {
|
||||
"text": "sde-ethnicity"
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql.identifier",
|
||||
"expression": "SDE Ethnicity"
|
||||
}
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
#### Stratifiers
|
||||
|
||||
Stratifiers are used to divide Measure populations into segments of interest. For example, it may be of interest to compare the Measure score between different age groups or genders. Each stratum within a stratification is scored the same way as the overall population. Stratifiers are defined using the [Measure.group.stratifier](http://hl7.org/fhir/R4/measure-definitions.html#Measure.group.stratifier) element.
|
||||
|
||||
HAPI does not yet implement stratifier support, but it is on the roadmap.
|
||||
|
||||
An example Measure resource with a stratifier set looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"group": [ {
|
||||
"stratifier": [ {
|
||||
"code": {
|
||||
"text": "Stratum 1"
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql.identifier",
|
||||
"expression": "Stratification 1"
|
||||
}
|
||||
}]
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
##### Stratifier Expression Support
|
||||
|
||||
As with Populations and Supplemental Data Elements, the criteria used for Stratification may be defined with CQL or FHIR Path.
|
||||
|
||||
| Expression Type | Supported |
|
||||
| --------------- | :------------------: |
|
||||
| CQL | :white_large_square: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
##### Stratifier Component Support
|
||||
|
||||
The Measure specification also supports multi-dimensional stratification, for cases where more than one data element is needed.
|
||||
|
||||
| Stratifier Type | Supported |
|
||||
| ---------------- | :------------------: |
|
||||
| Single Component | :white_large_square: |
|
||||
| Multi Component | :white_large_square: |
|
||||
|
||||
#### Evaluated Resources
|
||||
|
||||
A FHIR MeasureReport permits referencing the Resources used during evaluation in the [MeasureReport.evaluatedResource](https://www.hl7.org/fhir/measurereport-definitions.html#MeasureReport.evaluatedResource) element. HAPI includes these resources when generating `subject` reports for a single Patient. Evaluated resources for `population` or `subject-list` reports are not included. For large populations this could quickly become an extremely large number of resources.
|
||||
|
||||
The evaluated resources will not include every resource on the HAPI server for a given subject. Rather, they include only the resources that were retrieved from the server by the CQL logic that was evaluated. This corresponds to the data-requirements for a given Measure. As an example, consider the following CQL:
|
||||
|
||||
```cql
|
||||
valueset "Example Value Set" : 'http://fhir.org/example-value-set'
|
||||
|
||||
define "Example Observations":
|
||||
[Observation : "Example Value Set"]
|
||||
```
|
||||
|
||||
That CQL will only select Observation Resources that have a code in the "Example Value Set". Those Observations will be reported in the Evaluated Resources while any others will not.
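
A short sketch of reading the evaluated resources from a single-subject report follows. The Patient ID is hypothetical and the server base URL matches the earlier examples; the references printed are exactly the retrieves performed by the CQL, as described above.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.MeasureReport;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.StringType;

public class PrintEvaluatedResources {
    public static void main(String[] args) {
        FhirContext ctx = FhirContext.forR4();
        IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

        Parameters inParams = new Parameters();
        inParams.addParameter().setName("subject").setValue(new StringType("Patient/123"));
        inParams.addParameter().setName("reportType").setValue(new StringType("subject"));

        MeasureReport report = client.operation()
            .onInstance(new IdType("Measure", "BreastCancerScreeningFHIR"))
            .named("$evaluate-measure")
            .withParameters(inParams)
            .returnResourceType(MeasureReport.class)
            .execute();

        // Only the resources actually retrieved by the CQL (e.g. Observations in "Example Value Set")
        // appear here, not every resource the subject has on the server.
        for (Reference evaluated : report.getEvaluatedResource()) {
            System.out.println(evaluated.getReference());
        }
    }
}
```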
|
||||
|
||||
#### Last Received On
|
||||
|
||||
The `lastReceivedOn` parameter is the date the Measure was evaluated and reported. It is used to limit the number of resources reported in the Measure report for individual reports. It is currently not supported by HAPI.
|
||||
|
||||
#### Extensions
|
||||
|
||||
A number of extensions to Measure evaluation defined by various IGs are supported. They are described briefly in the table below.
|
||||
|
||||
| Extension | Description |
|
||||
| --------- | ----------- |
|
||||
| http://hl7.org/fhir/us/cqframework/cqfmeasures/StructureDefinition/cqfm-productLine | Used to evaluate different product lines (e.g. Medicare, Private, etc.) |
|
||||
| http://hl7.org/fhir/StructureDefinition/cqf-measureInfo | Used to demark a Measure Observation |
|
||||
| http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-populationReference | Used to specify the population that triggered a particular `evaluatedResource`|
|
||||
|
||||
There's not currently a way to configure which extensions are enabled. All supported extensions are always enabled.
|
||||
|
||||
## Architecture
|
||||
|
||||
Below are a few diagrams that show the overall architecture of Measure evaluation and how it fits into the HAPI FHIR Server.
|
||||
|
||||
### Component Diagram
|
||||
|
||||
This is a simplified component diagram of the Measure evaluation architecture.
|
||||
|
||||
![Measure Evaluation Architecture](/hapi-fhir/docs/images/ref_measure_architecture_drawio.svg)
|
||||
|
||||
### Sequence Chart
|
||||
|
||||
This sequence chart approximates the Measure evaluation logic implemented by HAPI.
|
||||
|
||||
![Measure Evaluation Sequence Chart](/hapi-fhir/docs/images/measure_evaluation_sequence.png)
|
||||
|
||||
## FAQs
|
||||
|
||||
Q: I get an error saying HAPI can't locate my library, and I've verified it's on the server.
|
||||
|
||||
A: HAPI follows the [Library conformance requirements](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#conformance-requirement-3-1) defined by the CQF Measures IG, meaning the Library must have a `logic-library` type, the name and version of the FHIR Library and the CQL library must match, and the URL of the Library must end in the name of the Library.
|
||||
|
||||
FHIR Libraries generated from CQL via the IG Publisher follow these requirements automatically.
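
If a Library is assembled by hand rather than generated by the IG Publisher, a sketch of a resource that satisfies these requirements looks like the following. The name and URL reuse the `EXMLogic` example from earlier in this page; the version and the CQL content are placeholders.

```java
import org.hl7.fhir.r4.model.Attachment;
import org.hl7.fhir.r4.model.Library;

import java.nio.charset.StandardCharsets;

public class ConformantLibrary {
    public static Library build() {
        Library library = new Library();
        // The URL must end in the name of the Library
        library.setUrl("http://hl7.org/fhir/us/cqfmeasures/Library/EXMLogic");
        // The FHIR Library name and version must match the CQL library name and version
        library.setName("EXMLogic");
        library.setVersion("1.0.0");
        // The Library must have the logic-library type
        library.getType().addCoding()
            .setSystem("http://terminology.hl7.org/CodeSystem/library-type")
            .setCode("logic-library");
        // Placeholder CQL content; real content comes from your translated CQL
        library.addContent(new Attachment()
            .setContentType("text/cql")
            .setData("library EXMLogic version '1.0.0'".getBytes(StandardCharsets.UTF_8)));
        return library;
    }
}
```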
|
||||
|
||||
Q: Does HAPI support partitions for evaluation?
|
||||
|
||||
A: Yes, though the Measure and associated Resources must be in the same partition as the clinical data being used.
|
||||
|
||||
## Roadmap
|
||||
|
||||
* Complete cohort implementation
|
||||
* Support for stratifiers
|
||||
* Support for Group subjects
|
||||
* Support for FHIRPath expressions in Stratifiers, Supplemental Data Elements, and Population Criteria
|
||||
* `$data-requirements`, `$collect-data`, `$submit-data`, and `$care-gaps` operations
|
||||
* Support for more extensions defined in the CQF Measures, CPG, and ATR IGs
|
|
@ -346,7 +346,7 @@ Sort specifications can be passed into handler methods by adding a parameter of
|
|||
Example URL to invoke this method:
|
||||
|
||||
```url
|
||||
http://fhir.example.com/Patient?_identifier=urn:foo|123&_sort=given
|
||||
http://fhir.example.com/Patient?identifier=urn:foo|123&_sort=given
|
||||
```
|
||||
|
||||
<a name="limiting-results"/>
|
||||
|
@ -364,7 +364,7 @@ of resources fetched from the database.
|
|||
Example URL to invoke this method:
|
||||
|
||||
```url
|
||||
http://fhir.example.com/Patient?_identifier=urn:foo|123&_count=10
|
||||
http://fhir.example.com/Patient?identifier=urn:foo|123&_count=10
|
||||
```
|
||||
|
||||
# Paging
|
||||
|
@ -388,17 +388,17 @@ for more information.
|
|||
Example URL to invoke this method for the first page:
|
||||
|
||||
```url
|
||||
http://fhir.example.com/Patient?_identifier=urn:foo|123&_count=10&_offset=0
|
||||
http://fhir.example.com/Patient?identifier=urn:foo|123&_count=10&_offset=0
|
||||
```
|
||||
or just
|
||||
```url
|
||||
http://fhir.example.com/Patient?_identifier=urn:foo|123&_count=10
|
||||
http://fhir.example.com/Patient?identifier=urn:foo|123&_count=10
|
||||
```
|
||||
|
||||
Example URL to invoke this method for the second page:
|
||||
|
||||
```url
|
||||
http://fhir.example.com/Patient?_identifier=urn:foo|123&_count=10&_offset=10
|
||||
http://fhir.example.com/Patient?identifier=urn:foo|123&_count=10&_offset=10
|
||||
```
|
||||
|
||||
Note that if the paging provider is configured to be database backed, `_offset=0` behaves differently than no `_offset`. This
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.7.0-PRE2-SNAPSHOT</version>
|
||||
<version>5.7.0-PRE3-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.7.0-PRE2-SNAPSHOT</version>
|
||||
<version>5.7.0-PRE3-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.7.0-PRE2-SNAPSHOT</version>
|
||||
<version>5.7.0-PRE3-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
|
|
@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.api.Constants;
|
|||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Sets;
|
||||
import com.google.common.reflect.ClassPath;
|
||||
import com.google.common.reflect.ClassPath.ClassInfo;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
@ -74,6 +75,17 @@ public class TestUtil {
|
|||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TestUtil.class);
|
||||
private static Set<String> ourReservedWords;
|
||||
|
||||
|
||||
// Exceptions set because H2 sets indexes for FKs automatically so this index had to be called as the target FK field
|
||||
// it is indexing to avoid SchemaMigrationTest to complain about the extra index (which doesn't exist in H2)
|
||||
private static final Set<String> duplicateNameValidationExceptionList = Sets.newHashSet(
|
||||
"FK_CONCEPTPROP_CONCEPT",
|
||||
"FK_CONCEPTDESIG_CONCEPT",
|
||||
"FK_TERM_CONCEPTPC_CHILD",
|
||||
"FK_TERM_CONCEPTPC_PARENT"
|
||||
);
|
||||
|
||||
|
||||
/**
|
||||
* non instantiable
|
||||
*/
|
||||
|
@ -252,7 +264,8 @@ public class TestUtil {
|
|||
}
|
||||
for (Index nextConstraint : table.indexes()) {
|
||||
assertNotADuplicateName(nextConstraint.name(), theNames);
|
||||
Validate.isTrue(nextConstraint.name().startsWith("IDX_"), nextConstraint.name() + " must start with IDX_");
|
||||
Validate.isTrue(nextConstraint.name().startsWith("IDX_") || nextConstraint.name().startsWith("FK_"),
|
||||
nextConstraint.name() + " must start with IDX_ or FK_ (last one when indexing a FK column)");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -269,7 +282,9 @@ public class TestUtil {
|
|||
Validate.notNull(fk);
|
||||
Validate.isTrue(isNotBlank(fk.name()), "Foreign key on " + theAnnotatedElement + " has no name()");
|
||||
Validate.isTrue(fk.name().startsWith("FK_"));
|
||||
assertNotADuplicateName(fk.name(), theNames);
|
||||
if ( ! duplicateNameValidationExceptionList.contains(fk.name())) {
|
||||
assertNotADuplicateName(fk.name(), theNames);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.7.0-PRE2-SNAPSHOT</version>
|
||||
<version>5.7.0-PRE3-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -156,6 +156,14 @@
|
|||
<version>${project.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-storage</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<classifier>tests</classifier>
|
||||
<type>test-jar</type>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>ch.qos.logback</groupId>
|
||||
|
|
|
@ -26,6 +26,8 @@ import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
|
|||
import ca.uhn.fhir.jpa.delete.job.DeleteExpungeJobConfig;
|
||||
import ca.uhn.fhir.jpa.reindex.job.ReindexEverythingJobConfig;
|
||||
import ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig;
|
||||
import ca.uhn.fhir.jpa.term.job.TermCodeSystemDeleteJobConfig;
|
||||
import ca.uhn.fhir.jpa.term.job.TermCodeSystemVersionDeleteJobConfig;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
||||
|
@ -38,7 +40,9 @@ import org.springframework.context.annotation.Import;
|
|||
DeleteExpungeJobConfig.class,
|
||||
ReindexJobConfig.class,
|
||||
ReindexEverythingJobConfig.class,
|
||||
MdmClearJobConfig.class
|
||||
MdmClearJobConfig.class,
|
||||
TermCodeSystemDeleteJobConfig.class,
|
||||
TermCodeSystemVersionDeleteJobConfig.class
|
||||
})
|
||||
public class BatchJobsConfig {
|
||||
}
|
||||
|
|
|
@ -20,20 +20,31 @@ package ca.uhn.fhir.jpa.binstore;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.PayloadTooLargeException;
|
||||
import ca.uhn.fhir.util.BinaryUtil;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import com.google.common.hash.HashFunction;
|
||||
import com.google.common.hash.Hashing;
|
||||
import com.google.common.hash.HashingInputStream;
|
||||
import com.google.common.io.ByteStreams;
|
||||
import org.apache.commons.io.input.CountingInputStream;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.security.SecureRandom;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
|
||||
private final SecureRandom myRandom;
|
||||
|
@ -41,6 +52,8 @@ abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
|
|||
private final int ID_LENGTH = 100;
|
||||
private int myMaximumBinarySize = Integer.MAX_VALUE;
|
||||
private int myMinimumBinarySize;
|
||||
@Autowired
|
||||
private FhirContext myFhirContext;
|
||||
|
||||
BaseBinaryStorageSvcImpl() {
|
||||
myRandom = new SecureRandom();
|
||||
|
@ -104,7 +117,6 @@ abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
|
|||
};
|
||||
}
|
||||
|
||||
|
||||
String provideIdForNewBlob(String theBlobIdOrNull) {
|
||||
String id = theBlobIdOrNull;
|
||||
if (isBlank(theBlobIdOrNull)) {
|
||||
|
@ -112,4 +124,32 @@ abstract class BaseBinaryStorageSvcImpl implements IBinaryStorageSvc {
|
|||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] fetchDataBlobFromBinary(IBaseBinary theBaseBinary) throws IOException {
|
||||
IPrimitiveType<byte[]> dataElement = BinaryUtil.getOrCreateData(myFhirContext, theBaseBinary);
|
||||
byte[] value = dataElement.getValue();
|
||||
if (value == null) {
|
||||
Optional<String> attachmentId = getAttachmentId((IBaseHasExtensions) dataElement);
|
||||
if (attachmentId.isPresent()) {
|
||||
value = fetchBlob(theBaseBinary.getIdElement(), attachmentId.get());
|
||||
} else {
|
||||
throw new InternalErrorException("Unable to load binary blob data for " + theBaseBinary.getIdElement());
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Optional<String> getAttachmentId(IBaseHasExtensions theBaseBinary) {
|
||||
return theBaseBinary
|
||||
.getExtension()
|
||||
.stream()
|
||||
.filter(t -> HapiExtensions.EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl()))
|
||||
.filter(t -> t.getValue() instanceof IPrimitiveType)
|
||||
.map(t -> (IPrimitiveType<String>) t.getValue())
|
||||
.map(t -> t.getValue())
|
||||
.filter(t -> isNotBlank(t))
|
||||
.findFirst();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.binstore;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
@ -101,4 +102,13 @@ public interface IBinaryStorageSvc {
|
|||
* @return The payload as a byte array
|
||||
*/
|
||||
byte[] fetchBlob(IIdType theResourceId, String theBlobId) throws IOException;
|
||||
|
||||
/**
|
||||
* Fetch the byte[] contents of a given Binary resource's `data` element. If the data is a standard base64encoded string that is embedded, return it.
|
||||
* Otherwise, attempt to load the externalized binary blob via the the externalized binary storage service.
|
||||
*
|
||||
* @param theResourceId The resource ID The ID of the Binary resource you want to extract data bytes from
|
||||
* @return The binary data blob as a byte array
|
||||
*/
|
||||
byte[] fetchDataBlobFromBinary(IBaseBinary theResource) throws IOException;
|
||||
}
|
||||
|
|
|
@ -20,8 +20,10 @@ package ca.uhn.fhir.jpa.binstore;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
|
@ -81,4 +83,9 @@ public class NullBinaryStorageSvcImpl implements IBinaryStorageSvc {
|
|||
public byte[] fetchBlob(IIdType theResourceId, String theBlobId) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] fetchDataBlobFromBinary(IBaseBinary theResource) throws IOException {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -155,8 +155,13 @@ import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValid
|
|||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
|
||||
import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
|
||||
import org.springframework.batch.core.configuration.JobRegistry;
|
||||
import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
|
||||
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
|
||||
import org.springframework.batch.core.explore.JobExplorer;
|
||||
import org.springframework.batch.core.launch.JobLauncher;
|
||||
import org.springframework.batch.core.launch.support.SimpleJobOperator;
|
||||
import org.springframework.batch.core.repository.JobRepository;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
|
@ -228,6 +233,9 @@ public abstract class BaseConfig {
|
|||
private Integer searchCoordMaxPoolSize = 100;
|
||||
private Integer searchCoordQueueCapacity = 200;
|
||||
|
||||
@Autowired
|
||||
private JobLauncher myJobLauncher;
|
||||
|
||||
/**
|
||||
* Subclasses may override this method to provide settings such as search coordinator pool sizes.
|
||||
*/
|
||||
|
@ -278,6 +286,18 @@ public abstract class BaseConfig {
|
|||
return new CascadingDeleteInterceptor(theFhirContext, theDaoRegistry, theInterceptorBroadcaster);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public SimpleJobOperator jobOperator(JobExplorer jobExplorer, JobRepository jobRepository, JobRegistry jobRegistry) {
|
||||
SimpleJobOperator jobOperator = new SimpleJobOperator();
|
||||
|
||||
jobOperator.setJobExplorer(jobExplorer);
|
||||
jobOperator.setJobRepository(jobRepository);
|
||||
jobOperator.setJobRegistry(jobRegistry);
|
||||
jobOperator.setJobLauncher(myJobLauncher);
|
||||
|
||||
return jobOperator;
|
||||
}
|
||||
|
||||
|
||||
@Lazy
|
||||
@Bean
|
||||
|
|
|
@ -8,6 +8,8 @@ import org.springframework.data.jpa.repository.Modifying;
|
|||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
|
@ -30,6 +32,13 @@ import org.springframework.data.repository.query.Param;
|
|||
|
||||
public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryTable, Long>, IHapiFhirJpaRepository {
|
||||
|
||||
/**
|
||||
* This is really only intended for unit tests - There can be many versions of resources in
|
||||
* the real world, use a pageable query for real uses.
|
||||
*/
|
||||
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId ORDER BY t.myResourceVersion ASC")
|
||||
List<ResourceHistoryTable> findAllVersionsForResourceIdInOrder(@Param("resId") Long theId);
|
||||
|
||||
|
||||
@Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
|
||||
ResourceHistoryTable findForIdAndVersionAndFetchProvenance(@Param("id") long theId, @Param("version") long theVersion);
|
||||
|
|
|
@ -35,8 +35,8 @@ public interface ITermCodeSystemVersionDao extends JpaRepository<TermCodeSystemV
|
|||
@Query("DELETE FROM TermCodeSystemVersion csv WHERE csv.myCodeSystem = :cs")
|
||||
void deleteForCodeSystem(@Param("cs") TermCodeSystem theCodeSystem);
|
||||
|
||||
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid")
|
||||
List<TermCodeSystemVersion> findByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid);
|
||||
@Query("SELECT myId FROM TermCodeSystemVersion WHERE myCodeSystemPid = :codesystem_pid order by myId")
|
||||
List<Long> findSortedPidsByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid);
|
||||
|
||||
@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid AND cs.myCodeSystemVersionId = :codesystem_version_id")
|
||||
TermCodeSystemVersion findByCodeSystemPidAndVersion(@Param("codesystem_pid") Long theCodeSystemPid, @Param("codesystem_version_id") String theCodeSystemVersionId);
|
||||
|
|
|
@ -4,8 +4,8 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
|||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
|
@ -40,8 +40,9 @@ public interface ITermConceptDao extends JpaRepository<TermConcept, Long>, IHapi
|
|||
@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system AND c.myCode = :code")
|
||||
Optional<TermConcept> findByCodeSystemAndCode(@Param("code_system") TermCodeSystemVersion theCodeSystem, @Param("code") String theCode);
|
||||
|
||||
@Query("SELECT t.myId FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
|
||||
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
|
||||
@Modifying
|
||||
@Query("DELETE FROM TermConcept WHERE myCodeSystem.myId = :cs_pid")
|
||||
int deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
|
||||
|
||||
@Query("SELECT c FROM TermConcept c WHERE c.myCodeSystem = :code_system")
|
||||
List<TermConcept> findByCodeSystemVersion(@Param("code_system") TermCodeSystemVersion theCodeSystem);
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
package ca.uhn.fhir.jpa.dao.data;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
|
@ -29,10 +28,8 @@ import org.springframework.data.repository.query.Param;
|
|||
|
||||
public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long>, IHapiFhirJpaRepository {
|
||||
|
||||
@Query("SELECT t.myId FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
|
||||
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid);
|
||||
|
||||
@Query("SELECT COUNT(t) FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
|
||||
Integer countByCodeSystemVersion(@Param("csv_pid") Long thePid);
|
||||
@Modifying
|
||||
@Query("DELETE FROM TermConceptDesignation WHERE myCodeSystemVersion.myId = :csv_pid")
|
||||
int deleteByCodeSystemVersion(@Param("csv_pid") Long thePid);
|
||||
|
||||
}
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
package ca.uhn.fhir.jpa.dao.data;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
|
@ -31,13 +30,11 @@ import java.util.Collection;
|
|||
|
||||
public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConceptParentChildLink, Long>, IHapiFhirJpaRepository {
|
||||
|
||||
@Query("SELECT COUNT(t) FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
|
||||
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
|
||||
|
||||
@Query("SELECT t.myParentPid FROM TermConceptParentChildLink t WHERE t.myChildPid = :child_pid")
|
||||
Collection<Long> findAllWithChild(@Param("child_pid") Long theConceptPid);
|
||||
|
||||
@Query("SELECT t.myPid FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
|
||||
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
|
||||
@Modifying
|
||||
@Query("DELETE FROM TermConceptParentChildLink WHERE myCodeSystemVersionPid = :cs_pid")
|
||||
int deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
|
||||
|
||||
}
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
package ca.uhn.fhir.jpa.dao.data;
|
||||
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
|
||||
|
@ -29,8 +28,9 @@ import org.springframework.data.repository.query.Param;
|
|||
|
||||
public interface ITermConceptPropertyDao extends JpaRepository<TermConceptProperty, Long>, IHapiFhirJpaRepository {
|
||||
|
||||
@Query("SELECT t.myId FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
|
||||
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
|
||||
@Modifying
|
||||
@Query("DELETE FROM TermConceptProperty WHERE myCodeSystemVersion.myId = :cs_pid")
|
||||
int deleteByCodeSystemVersion(@Param("cs_pid") Long thePid);
|
||||
|
||||
@Query("SELECT COUNT(t) FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
|
||||
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
|
||||
|
|
|
@ -40,7 +40,6 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
|||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import com.google.common.collect.MultimapBuilder;
|
||||
import org.apache.commons.lang3.Functions;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
|
@ -316,12 +315,15 @@ public class IdHelperService {
|
|||
TypedQuery<ForcedId> query = myEntityManager.createQuery(criteriaQuery);
|
||||
List<ForcedId> results = query.getResultList();
|
||||
for (ForcedId nextId : results) {
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(nextId.getResourceId());
|
||||
populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), persistentId);
|
||||
retVal.add(persistentId);
|
||||
// Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
|
||||
if (nextId.getResourceId() != null) {
|
||||
ResourcePersistentId persistentId = new ResourcePersistentId(nextId.getResourceId());
|
||||
populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), persistentId);
|
||||
retVal.add(persistentId);
|
||||
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
|
||||
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, persistentId);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.dao.search;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.dao.search;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
|
||||
|
@ -7,7 +27,12 @@ import ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData;
|
|||
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Extract search params for advanced lucene indexing.
|
||||
|
@ -26,7 +51,6 @@ public class ExtendedLuceneIndexExtractor {
|
|||
|
||||
@NotNull
|
||||
public ExtendedLuceneIndexData extract(ResourceIndexedSearchParams theNewParams) {
|
||||
// wip mb this is testable now.
|
||||
ExtendedLuceneIndexData retVal = new ExtendedLuceneIndexData(myContext);
|
||||
|
||||
theNewParams.myStringParams.forEach(nextParam ->
|
||||
|
@ -39,7 +63,7 @@ public class ExtendedLuceneIndexExtractor {
|
|||
|
||||
// awkwardly, links are shared between different search params if they use the same path,
|
||||
// so we re-build the linkage.
|
||||
// WIP MB is this the right design? Or should we follow JPA and share these?
|
||||
// WIPMB is this the right design? Or should we follow JPA and share these?
|
||||
Map<String, List<String>> linkPathToParamName = new HashMap<>();
|
||||
for (String nextParamName : theNewParams.getPopulatedResourceLinkParameters()) {
|
||||
RuntimeSearchParam sp = myParams.get(nextParamName);
|
||||
|
|
|
@ -1,5 +1,25 @@
|
|||
package ca.uhn.fhir.jpa.dao.search;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
|
|
|
@ -19,3 +19,23 @@
|
|||
* Activated by {@link ca.uhn.fhir.jpa.api.config.DaoConfig#setAdvancedLuceneIndexing(boolean)}.
|
||||
*/
|
||||
package ca.uhn.fhir.jpa.dao.search;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
|
|
@ -23,15 +23,27 @@ package ca.uhn.fhir.jpa.entity;
|
|||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.persistence.*;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.FetchType;
|
||||
import javax.persistence.ForeignKey;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Index;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
import java.io.Serializable;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
import static org.apache.commons.lang3.StringUtils.length;
|
||||
|
||||
@Entity
|
||||
@Table(name = "TRM_CONCEPT_DESIG", uniqueConstraints = {
|
||||
}, indexes = {
|
||||
@Table(name = "TRM_CONCEPT_DESIG", uniqueConstraints = { }, indexes = {
|
||||
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
|
||||
@Index(name = "FK_CONCEPTDESIG_CONCEPT", columnList = "CONCEPT_PID", unique = false)
|
||||
})
|
||||
public class TermConceptDesignation implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
|
|
@ -22,11 +22,27 @@ package ca.uhn.fhir.jpa.entity;
|
|||
|
||||
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
|
||||
|
||||
import javax.persistence.*;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.EnumType;
|
||||
import javax.persistence.Enumerated;
|
||||
import javax.persistence.FetchType;
|
||||
import javax.persistence.ForeignKey;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Index;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
import java.io.Serializable;
|
||||
|
||||
@Entity
|
||||
@Table(name = "TRM_CONCEPT_PC_LINK", indexes = {
|
||||
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
|
||||
@Index(name = "FK_TERM_CONCEPTPC_CHILD", columnList = "CHILD_PID", unique = false),
|
||||
@Index(name = "FK_TERM_CONCEPTPC_PARENT", columnList = "PARENT_PID", unique = false)
|
||||
})
|
||||
public class TermConceptParentChildLink implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
|
|
@ -36,6 +36,7 @@ import javax.persistence.ForeignKey;
|
|||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Index;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.Lob;
|
||||
import javax.persistence.ManyToOne;
|
||||
|
@ -48,7 +49,9 @@ import static org.apache.commons.lang3.StringUtils.left;
|
|||
import static org.apache.commons.lang3.StringUtils.length;
|
||||
|
||||
@Entity
|
||||
@Table(name = "TRM_CONCEPT_PROPERTY", uniqueConstraints = {
|
||||
@Table(name = "TRM_CONCEPT_PROPERTY", uniqueConstraints = { }, indexes = {
|
||||
// must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
|
||||
@Index(name = "FK_CONCEPTPROP_CONCEPT", columnList = "CONCEPT_PID", unique = false)
|
||||
})
|
||||
public class TermConceptProperty implements Serializable {
|
||||
public static final int MAX_PROPTYPE_ENUM_LENGTH = 6;
|
||||
|
|
|
@ -51,6 +51,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
|
||||
private final Set<FlagEnum> myFlags;
|
||||
|
||||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
public static final DriverTypeEnum[] NON_AUTOMATIC_FK_INDEX_PLATFORMS = new DriverTypeEnum[] {
|
||||
DriverTypeEnum.POSTGRES_9_4, DriverTypeEnum.ORACLE_12C, DriverTypeEnum.MSSQL_2012 };
|
||||
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
|
@ -76,8 +81,44 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
init540(); // 20210218 - 20210520
|
||||
init550(); // 20210520 -
|
||||
init560(); // 20211027 -
|
||||
init570(); // 20211102 -
|
||||
}
|
||||
|
||||
|
||||
private void init570() {
|
||||
Builder version = forVersion(VersionEnum.V5_7_0);
|
||||
|
||||
// both indexes must have same name that indexed FK or SchemaMigrationTest complains because H2 sets this index automatically
|
||||
|
||||
version.onTable("TRM_CONCEPT_PROPERTY")
|
||||
.addIndex("20211102.1", "FK_CONCEPTPROP_CONCEPT")
|
||||
.unique(false)
|
||||
.withColumns("CONCEPT_PID")
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
|
||||
version.onTable("TRM_CONCEPT_DESIG")
|
||||
.addIndex("20211102.2", "FK_CONCEPTDESIG_CONCEPT")
|
||||
.unique(false)
|
||||
.withColumns("CONCEPT_PID")
|
||||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
|
||||
version.onTable("TRM_CONCEPT_PC_LINK")
|
||||
.addIndex("20211102.3", "FK_TERM_CONCEPTPC_CHILD")
|
||||
.unique(false)
|
||||
.withColumns("CHILD_PID")
|
||||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
|
||||
version.onTable("TRM_CONCEPT_PC_LINK")
|
||||
.addIndex("20211102.4", "FK_TERM_CONCEPTPC_PARENT")
|
||||
.unique(false)
|
||||
.withColumns("PARENT_PID")
|
||||
// H2, Derby, MariaDB, and MySql automatically add indexes to foreign keys
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
}
|
||||
|
||||
|
||||
private void init560() {
|
||||
init560_20211027();
|
||||
}
|
||||
|
|
|
@@ -27,6 +27,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.dao.data.INpmPackageDao;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionResourceDao;

@@ -123,6 +124,9 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
@Autowired
private PartitionSettings myPartitionSettings;

@Autowired(required = false) // It is possible that some implementers will not create such a bean.
private IBinaryStorageSvc myBinaryStorageSvc;

@Override
@Transactional
public NpmPackage loadPackageFromCacheOnly(String theId, @Nullable String theVersion) {

@@ -172,13 +176,37 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
private IHapiPackageCacheManager.PackageContents loadPackageContents(NpmPackageVersionEntity thePackageVersion) {
IFhirResourceDao<? extends IBaseBinary> binaryDao = getBinaryDao();
IBaseBinary binary = binaryDao.readByPid(new ResourcePersistentId(thePackageVersion.getPackageBinary().getId()));
try {
byte[] content = fetchBlobFromBinary(binary);
PackageContents retVal = new PackageContents()
.setBytes(content)
.setPackageId(thePackageVersion.getPackageId())
.setVersion(thePackageVersion.getVersionId())
.setLastModified(thePackageVersion.getUpdatedTime());
return retVal;
} catch (IOException e) {
throw new InternalErrorException("Failed to load package. There was a problem reading binaries", e);
}
}

PackageContents retVal = new PackageContents()
.setBytes(binary.getContent())
.setPackageId(thePackageVersion.getPackageId())
.setVersion(thePackageVersion.getVersionId())
.setLastModified(thePackageVersion.getUpdatedTime());
return retVal;

/**
 * Helper method which will attempt to use the IBinaryStorageSvc to resolve the binary blob if available. If
 * the bean is unavailable, fall back to assuming we are using an embedded base64 in the data element.
 * @param theBinary the Binary whose `data` blob you want to retrieve
 * @return a byte array containing the blob.
 *
 * @throws IOException
 */
private byte[] fetchBlobFromBinary(IBaseBinary theBinary) throws IOException {
if (myBinaryStorageSvc != null) {
return myBinaryStorageSvc.fetchDataBlobFromBinary(theBinary);
} else {
byte[] value = BinaryUtil.getOrCreateData(myCtx, theBinary).getValue();
if (value == null) {
throw new InternalErrorException("Failed to fetch blob from Binary/" + theBinary.getIdElement());
}
return value;
}
}

@SuppressWarnings("unchecked")

@@ -487,14 +515,12 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac

private IBaseResource loadPackageEntity(NpmPackageVersionResourceEntity contents) {
try {
ResourcePersistentId binaryPid = new ResourcePersistentId(contents.getResourceBinary().getId());
IBaseBinary binary = getBinaryDao().readByPid(binaryPid);
byte[] resourceContentsBytes = BinaryUtil.getOrCreateData(myCtx, binary).getValue();
String resourceContents = new String(resourceContentsBytes, StandardCharsets.UTF_8);

FhirContext packageContext = getFhirContext(contents.getFhirVersion());
return EncodingEnum.detectEncoding(resourceContents).newParser(packageContext).parseResource(resourceContents);
ResourcePersistentId binaryPid = new ResourcePersistentId(contents.getResourceBinary().getId());
IBaseBinary binary = getBinaryDao().readByPid(binaryPid);
byte[] resourceContentsBytes = fetchBlobFromBinary(binary);
String resourceContents = new String(resourceContentsBytes, StandardCharsets.UTF_8);
FhirContext packageContext = getFhirContext(contents.getFhirVersion());
return EncodingEnum.detectEncoding(resourceContents).newParser(packageContext).parseResource(resourceContents);
} catch (Exception e) {
throw new RuntimeException("Failed to load package resource " + contents, e);
}

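Reviewer note: the new @Autowired(required = false) injection above only resolves when the deployment actually defines an IBinaryStorageSvc bean; otherwise JpaPackageCache falls back to the inline base64 data element. A minimal configuration sketch follows (illustrative only, not part of this commit; the concrete storage implementation shown is just one assumed option).

import ca.uhn.fhir.jpa.binstore.DatabaseBlobBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class BinaryStorageConfig {

	// Illustrative sketch only: expose some IBinaryStorageSvc implementation so the
	// optional injection in JpaPackageCache resolves; without such a bean,
	// fetchBlobFromBinary() reads the embedded base64 "data" element instead.
	@Bean
	public IBinaryStorageSvc binaryStorageSvc() {
		return new DatabaseBlobBinaryStorageSvcImpl(); // assumed implementation; any IBinaryStorageSvc works
	}
}
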
@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;

@@ -59,8 +60,9 @@ import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationManager;

@@ -86,6 +88,7 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;

@@ -108,8 +111,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Autowired
protected IdHelperService myIdHelperService;
@Autowired
private PlatformTransactionManager myTransactionManager;
@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
@Autowired
private ITermVersionAdapterSvc myTerminologyVersionAdapterSvc;

@@ -124,6 +125,13 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@Autowired
private IResourceTableDao myResourceTableDao;

@Autowired
private IBatchJobSubmitter myJobSubmitter;

@Autowired @Qualifier(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
private Job myTermCodeSystemVersionDeleteJob;

@Override
public ResourcePersistentId getValueSetResourcePid(IIdType theIdType) {
return myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), theIdType.getResourceType(), theIdType.getIdPart());

@@ -262,44 +270,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
return childTermConcepts;
}

@Override
@Transactional
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
assert TransactionSynchronizationManager.isActualTransactionActive();

ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());

myEntityManager.flush();
TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
cs.setCurrentVersion(null);
myCodeSystemDao.save(cs);
myCodeSystemDao.flush();

List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
List<Long> codeSystemVersionPids = codeSystemVersions
.stream()
.map(TermCodeSystemVersion::getPid)
.collect(Collectors.toList());
for (Long next : codeSystemVersionPids) {
deleteCodeSystemVersion(next);
}

myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
myCodeSystemDao.delete(theCodeSystem);
myEntityManager.flush();
}

@Override
@Transactional(propagation = Propagation.NEVER)
public void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

// Delete TermCodeSystemVersion
ourLog.info(" * Deleting TermCodeSystemVersion {}", theCodeSystemVersion.getCodeSystemVersionId());
deleteCodeSystemVersion(theCodeSystemVersion.getPid());

}

/**
 * Returns the number of saved concepts
 */

@@ -512,26 +482,6 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
return existing;
}

private void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
assert TransactionSynchronizationManager.isActualTransactionActive();
ourLog.info(" * Marking code system version {} for deletion", theCodeSystemVersionPid);

Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
if (codeSystem.getCurrentVersion() != null && codeSystem.getCurrentVersion().getPid().equals(theCodeSystemVersionPid)) {
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}
}

TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.findById(theCodeSystemVersionPid).orElseThrow(() -> new IllegalStateException());
codeSystemVersion.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString());
myCodeSystemVersionDao.save(codeSystemVersion);

myDeferredStorageSvc.deleteCodeSystemVersion(codeSystemVersion);
}

private void validateDstu3OrNewer() {
Validate.isTrue(myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3), "Terminology operations only supported in DSTU3+ mode");

@@ -20,12 +20,11 @@ package ca.uhn.fhir.jpa.term;
 * #L%
 */

import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -37,6 +36,7 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.StopWatch;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;

@@ -45,10 +45,15 @@ import org.hl7.fhir.r4.model.ValueSet;
import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobExecutionNotRunningException;
import org.springframework.batch.core.launch.JobOperator;
import org.springframework.batch.core.launch.NoSuchJobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

@@ -59,13 +64,17 @@ import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_VERSION_ID;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;

public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {

private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class);

@@ -75,6 +84,9 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
final private List<ValueSet> myDeferredValueSets = Collections.synchronizedList(new ArrayList<>());
final private List<ConceptMap> myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>());
final private List<TermConceptParentChildLink> myConceptLinksToSaveLater = Collections.synchronizedList(new ArrayList<>());
final private List<JobExecution> myCurrentJobExecutions = Collections.synchronizedList(new ArrayList<>());

@Autowired
protected ITermConceptDao myConceptDao;
@Autowired

@@ -83,10 +95,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
protected ITermCodeSystemVersionDao myCodeSystemVersionDao;
@Autowired
protected PlatformTransactionManager myTransactionMgr;
@Autowired
protected ITermConceptPropertyDao myConceptPropertyDao;
@Autowired
protected ITermConceptDesignationDao myConceptDesignationDao;
private boolean myProcessDeferred = true;
@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;

@@ -97,6 +105,19 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
@Autowired
private ITermCodeSystemStorageSvc myCodeSystemStorageSvc;

@Autowired
private IBatchJobSubmitter myJobSubmitter;

@Autowired
private JobOperator myJobOperator;

@Autowired @Qualifier(TERM_CODE_SYSTEM_DELETE_JOB_NAME)
private org.springframework.batch.core.Job myTermCodeSystemDeleteJob;

@Autowired @Qualifier(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
private org.springframework.batch.core.Job myTermCodeSystemVersionDeleteJob;

@Override
public void addConceptToStorageQueue(TermConcept theConcept) {
Validate.notNull(theConcept);

@@ -122,28 +143,26 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
}

@Override
@Transactional
public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
theCodeSystem.setCodeSystemUri("urn:uuid:" + UUID.randomUUID().toString());
myCodeSystemDao.save(theCodeSystem);
myDeferredCodeSystemsDeletions.add(theCodeSystem);
}

@Override
@Transactional
public void deleteCodeSystemForResource(ResourceTable theCodeSystemToDelete) {
// there are use cases (at least in tests) where the code system is not present for the resource but versions are,
// so, as code system deletion also deletes versions, we try the system first but if not present we also try versions
TermCodeSystem termCodeSystemToDelete = myCodeSystemDao.findByResourcePid(theCodeSystemToDelete.getResourceId());
if (termCodeSystemToDelete != null) {
termCodeSystemToDelete.setCodeSystemUri("urn:uuid:" + UUID.randomUUID());
myCodeSystemDao.save(termCodeSystemToDelete);
myDeferredCodeSystemsDeletions.add(termCodeSystemToDelete);
return;
}

List<TermCodeSystemVersion> codeSystemVersionsToDelete = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemToDelete.getResourceId());
for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete) {
if (codeSystemVersionToDelete != null) {
myDeferredCodeSystemVersionsDeletions.add(codeSystemVersionToDelete);
}
}
TermCodeSystem codeSystemToDelete = myCodeSystemDao.findByResourcePid(theCodeSystemToDelete.getResourceId());
if (codeSystemToDelete != null) {
deleteCodeSystem(codeSystemToDelete);
}
}

@Override
public void setProcessDeferred(boolean theProcessDeferred) {
myProcessDeferred = theProcessDeferred;

@@ -236,14 +255,26 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
myDeferredCodeSystemsDeletions.clear();
myConceptLinksToSaveLater.clear();
myDeferredCodeSystemVersionsDeletions.clear();
clearJobExecutions();
}

private void runInTransaction(Runnable theRunnable) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

new TransactionTemplate(myTransactionMgr).executeWithoutResult(tx -> theRunnable.run());
private void clearJobExecutions() {
for (JobExecution jobExecution : myCurrentJobExecutions) {
if (! jobExecution.isRunning()) { continue; }

try {
myJobOperator.stop(jobExecution.getId());

} catch (NoSuchJobExecutionException | JobExecutionNotRunningException theE) {
ourLog.error("Couldn't stop job execution {}: {}", jobExecution.getId(), theE);
}
}

myCurrentJobExecutions.clear();
}

private <T> T runInTransaction(Supplier<T> theRunnable) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

@@ -315,98 +346,53 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
return !myDeferredCodeSystemVersionsDeletions.isEmpty();
}

private void processDeferredCodeSystemDeletions() {
for (TermCodeSystem next : myDeferredCodeSystemsDeletions) {
myCodeSystemStorageSvc.deleteCodeSystem(next);
deleteTermCodeSystemOffline(next.getPid());
}
myDeferredCodeSystemsDeletions.clear();
}

private void processDeferredCodeSystemVersionDeletions() {
for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) {
processDeferredCodeSystemVersionDeletions(next.getPid());
deleteTermCodeSystemVersionOffline(next.getPid());
}

myDeferredCodeSystemVersionsDeletions.clear();
}

private void processDeferredCodeSystemVersionDeletions(long theCodeSystemVersionPid) {
assert !TransactionSynchronizationManager.isActualTransactionActive();
ourLog.info(" * Deleting CodeSystemVersion[id={}]", theCodeSystemVersionPid);

PageRequest page1000 = PageRequest.of(0, 1000);
private void deleteTermCodeSystemVersionOffline(Long theCodeSystemVersionPid) {
JobParameters jobParameters = new JobParameters(
Collections.singletonMap(
JOB_PARAM_CODE_SYSTEM_VERSION_ID, new JobParameter(theCodeSystemVersionPid, true) ));

// Parent/Child links
{
String descriptor = "parent/child links";
Supplier<Slice<Long>> loader = () -> myConceptParentChildLinkDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
try {

JobExecution jobExecution = myJobSubmitter.runJob(myTermCodeSystemVersionDeleteJob, jobParameters);
myCurrentJobExecutions.add(jobExecution);

} catch (JobParametersInvalidException theE) {
throw new InternalErrorException("Offline job submission for TermCodeSystemVersion: " +
theCodeSystemVersionPid + " failed: " + theE);
}

// Properties
{
String descriptor = "concept properties";
Supplier<Slice<Long>> loader = () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptPropertyDao);
}

// Designations
{
String descriptor = "concept designations";
Supplier<Slice<Long>> loader = () -> myConceptDesignationDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDesignationDao);
}

// Concepts
{
String descriptor = "concepts";
// For some reason, concepts are much slower to delete, so use a smaller batch size
PageRequest page100 = PageRequest.of(0, 100);
Supplier<Slice<Long>> loader = () -> myConceptDao.findIdsByCodeSystemVersion(page100, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDao);
}

runInTransaction(() -> {
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}

ourLog.info(" * Deleting code system version");
Optional<TermCodeSystemVersion> csv = myCodeSystemVersionDao.findById(theCodeSystemVersionPid);
if (csv.isPresent()) {
myCodeSystemVersionDao.delete(csv.get());
}
});

}

private <T> void doDelete(String theDescriptor, Supplier<Slice<Long>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, Long> theDao) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

int count;
ourLog.info(" * Deleting {}", theDescriptor);
int totalCount = runInTransaction(theCounter);
StopWatch sw = new StopWatch();
count = 0;
while (true) {
Slice<Long> link = runInTransaction(theLoader);
if (!link.hasContent()) {
break;
}
private void deleteTermCodeSystemOffline(Long theCodeSystemPid) {
JobParameters jobParameters = new JobParameters(
Collections.singletonMap(
JOB_PARAM_CODE_SYSTEM_ID, new JobParameter(theCodeSystemPid, true) ));

runInTransaction(() -> link.forEach(theDao::deleteById));
try {

count += link.getNumberOfElements();
ourLog.info(" * {} {} deleted ({}/{}) remaining - {}/sec - ETA: {}", count, theDescriptor, count, totalCount, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
JobExecution jobExecution = myJobSubmitter.runJob(myTermCodeSystemDeleteJob, jobParameters);
myCurrentJobExecutions.add(jobExecution);

} catch (JobParametersInvalidException theE) {
throw new InternalErrorException("Offline job submission for TermCodeSystem: " +
theCodeSystemPid + " failed: " + theE);
}
}

@@ -419,9 +405,14 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
retVal &= !isDeferredValueSets();
retVal &= !isDeferredConceptMaps();
retVal &= !isDeferredCodeSystemDeletions();
retVal &= !isJobsExecuting();
return retVal;
}

private boolean isJobsExecuting() {
return myCurrentJobExecutions.stream().anyMatch(JobExecution::isRunning);
}

private void saveConceptLink(TermConceptParentChildLink next) {
if (next.getId() == null) {

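Reviewer note: code-system deletions now finish asynchronously inside Spring Batch jobs, so callers (tests especially) should wait on the deferred-storage service rather than assume the data is gone when deleteCodeSystemForResource() returns. A hedged sketch of such a wait follows, assuming a Spring test context and the Awaitility library; the class and method names here are hypothetical.

import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.concurrent.TimeUnit;

import static org.awaitility.Awaitility.await;

public class WaitForTermDeferredDeletesExample {

	@Autowired
	private ITermDeferredStorageSvc myTermDeferredStorageSvc;

	public void waitForDeferredDeletes() {
		// keep flushing deferred work until the queues are empty and the delete jobs
		// tracked by the service report they are no longer running
		await().atMost(2, TimeUnit.MINUTES).until(() -> {
			myTermDeferredStorageSvc.saveDeferred();
			return myTermDeferredStorageSvc.isStorageQueueEmpty();
		});
	}
}
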
@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.term.api;
 * #L%
 */

import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -51,11 +50,6 @@ public interface ITermCodeSystemStorageSvc {
(boolean) theRequestDetails.getUserData().getOrDefault(MAKE_LOADING_VERSION_CURRENT, Boolean.TRUE);
}

void deleteCodeSystem(TermCodeSystem theCodeSystem);

void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion);

void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName,
String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable,
RequestDetails theRequestDetails);

@@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.term.api;
 * #L%
 */

import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;

@@ -54,8 +53,6 @@ public interface ITermDeferredStorageSvc {

void addValueSetsToStorageQueue(List<ValueSet> theValueSets);

void deleteCodeSystem(TermCodeSystem theCodeSystem);

void deleteCodeSystemForResource(ResourceTable theCodeSystemResourceToDelete);

void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion);

@@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration artifacts common to TermCodeSystemDeleteJobConfig and TermCodeSystemVersionDeleteJobConfig
 **/
@Configuration
public class BaseTermCodeSystemDeleteJobConfig {

protected static final int TERM_CONCEPT_DELETE_TIMEOUT = 60 * 2; // two minutes

@Autowired
protected JobBuilderFactory myJobBuilderFactory;

@Autowired
protected StepBuilderFactory myStepBuilderFactory;

@Bean
public BatchTermCodeSystemVersionDeleteWriter batchTermCodeSystemVersionDeleteWriter() {
return new BatchTermCodeSystemVersionDeleteWriter();
}

@Bean
public BatchConceptRelationsDeleteWriter batchConceptRelationsDeleteWriter() {
return new BatchConceptRelationsDeleteWriter();
}

@Bean
public BatchTermConceptsDeleteWriter batchTermConceptsDeleteWriter() {
return new BatchTermConceptsDeleteWriter();
}

}

@@ -0,0 +1,66 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;

import java.text.DecimalFormat;
import java.util.List;

public class BatchConceptRelationsDeleteWriter implements ItemWriter<Long> {
private static final Logger ourLog = LoggerFactory.getLogger(BatchConceptRelationsDeleteWriter.class);

private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###");

@Autowired
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;

@Autowired
private ITermConceptPropertyDao myConceptPropertyDao;

@Autowired
private ITermConceptDesignationDao myConceptDesignationDao;

@Override
public void write(List<? extends Long> theTermCodeSystemVersionPidList) throws Exception {
// receives input in chunks of size one
long codeSystemVersionId = theTermCodeSystemVersionPidList.get(0);

ourLog.info("Deleting term code links");
int deletedLinks = myConceptParentChildLinkDao.deleteByCodeSystemVersion(codeSystemVersionId);
ourLog.info("Deleted {} term code links", ourDecimalFormat.format(deletedLinks));

ourLog.info("Deleting term code properties");
int deletedProperties = myConceptPropertyDao.deleteByCodeSystemVersion(codeSystemVersionId);
ourLog.info("Deleted {} term code properties", ourDecimalFormat.format(deletedProperties));

ourLog.info("Deleting concept designations");
int deletedDesignations = myConceptDesignationDao.deleteByCodeSystemVersion(codeSystemVersionId);
ourLog.info("Deleted {} concept designations", ourDecimalFormat.format(deletedDesignations));
}
}

@@ -0,0 +1,54 @@
package ca.uhn.fhir.jpa.term.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Value;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_VERSION_ID;

/**
 * This reader works as a pass-through by passing the received parameter once to the writer,
 * in order to share the writer functionality between two jobs
 */
public class BatchTermCodeSystemUniqueVersionDeleteReader implements ItemReader<Long> {
private static final Logger ourLog = LoggerFactory.getLogger(BatchTermCodeSystemUniqueVersionDeleteReader.class);

@Value("#{jobParameters['" + JOB_PARAM_CODE_SYSTEM_VERSION_ID + "']}")
private Long myTermCodeSystemVersionPid;

// indicates if the parameter was already passed once to the writer, which indicates end of task
private boolean myParameterPassed;

@Override
public Long read() throws Exception {
if ( ! myParameterPassed) {
myParameterPassed = true;
return myTermCodeSystemVersionPid;
}

return null;
}

}

@@ -0,0 +1,75 @@
package ca.uhn.fhir.jpa.term.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.List;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;

/**
 * Reads the pids of the TermCodeSystemVersion(s) belonging to the TermCodeSystem being deleted, one per read() call
 */
public class BatchTermCodeSystemVersionDeleteReader implements ItemReader<Long> {
private static final Logger ourLog = LoggerFactory.getLogger(BatchTermCodeSystemVersionDeleteReader.class);

@Autowired
private ITermCodeSystemVersionDao myTermCodeSystemVersionDao;

@Value("#{jobParameters['" + JOB_PARAM_CODE_SYSTEM_ID + "']}")
private Long myTermCodeSystemPid;

private List<Long> myTermCodeSystemVersionPidList;
private int myCurrentIdx = 0;

@Override
public Long read() throws Exception {
if (myTermCodeSystemVersionPidList == null) {
myTermCodeSystemVersionPidList = myTermCodeSystemVersionDao.findSortedPidsByCodeSystemPid(myTermCodeSystemPid);
}

if (myTermCodeSystemVersionPidList.isEmpty()) {
// nothing to process
ourLog.info("Nothing to process");
return null;
}

if (myCurrentIdx >= myTermCodeSystemVersionPidList.size()) {
// nothing else to process
ourLog.info("No more versions to process");
return null;
}

// still processing elements
long TermCodeSystemVersionPid = myTermCodeSystemVersionPidList.get(myCurrentIdx++);
ourLog.info("Passing termCodeSystemVersionPid: {} to writer", TermCodeSystemVersionPid);
return TermCodeSystemVersionPid;
}

}

@@ -0,0 +1,72 @@
package ca.uhn.fhir.jpa.term.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Optional;

@Component
public class BatchTermCodeSystemVersionDeleteWriter implements ItemWriter<Long> {
private static final Logger ourLog = LoggerFactory.getLogger(BatchTermCodeSystemVersionDeleteWriter.class);

@Autowired
private ITermCodeSystemDao myCodeSystemDao;

@Autowired
private ITermCodeSystemVersionDao myTermCodeSystemVersionDao;

@Override
public void write(List<? extends Long> theTermCodeSystemVersionPidList) throws Exception {
// receives input in chunks of size one
long codeSystemVersionId = theTermCodeSystemVersionPidList.get(0);

ourLog.debug("Executing for codeSystemVersionId: {}", codeSystemVersionId);

// if the TermCodeSystemVersion being deleted is the current one, disconnect it from the TermCodeSystem
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(codeSystemVersionId);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
ourLog.info("Removing code system version: {} as current version of code system: {}", codeSystemVersionId, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}

ourLog.info("Deleting code system version: {}", codeSystemVersionId);
Optional<TermCodeSystemVersion> csv = myTermCodeSystemVersionDao.findById(codeSystemVersionId);
csv.ifPresent(theTermCodeSystemVersion -> {
myTermCodeSystemVersionDao.delete(theTermCodeSystemVersion);
ourLog.info("Code system version: {} deleted", codeSystemVersionId);
});

}
}

@@ -0,0 +1,51 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemWriter;
import org.springframework.beans.factory.annotation.Autowired;

import java.text.DecimalFormat;
import java.util.List;

public class BatchTermConceptsDeleteWriter implements ItemWriter<Long> {
private static final Logger ourLog = LoggerFactory.getLogger(BatchTermConceptsDeleteWriter.class);

private static final DecimalFormat ourDecimalFormat = new DecimalFormat("#,###");

@Autowired
private ITermConceptDao myConceptDao;

@Override
public void write(List<? extends Long> theTermCodeSystemVersionPidList) throws Exception {
// receives input in chunks of size one
long codeSystemVersionId = theTermCodeSystemVersionPidList.get(0);

ourLog.info("Deleting concepts");
int deletedConcepts = myConceptDao.deleteByCodeSystemVersion(codeSystemVersionId);
ourLog.info("Deleted {} concepts", ourDecimalFormat.format(deletedConcepts));
}
}

@@ -0,0 +1,122 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.transaction.interceptor.DefaultTransactionAttribute;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_STEP_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_STEP_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CONCEPTS_DELETE_STEP_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CONCEPT_RELATIONS_DELETE_STEP_NAME;

/**
 * Configuration for batch job which deletes a TermCodeSystem and its related TermCodeSystemVersion(s),
 * TermConceptProperty(es), TermConceptDesignation(s), and TermConceptParentChildLink(s)
 **/
@Configuration
public class TermCodeSystemDeleteJobConfig extends BaseTermCodeSystemDeleteJobConfig {

@Bean(name = TERM_CODE_SYSTEM_DELETE_JOB_NAME)
@Lazy
public Job termCodeSystemDeleteJob() {
return myJobBuilderFactory.get(TERM_CODE_SYSTEM_DELETE_JOB_NAME)
.validator(termCodeSystemDeleteJobParameterValidator())
.start(termConceptRelationsDeleteStep())
.next(termConceptsDeleteStep())
.next(termCodeSystemVersionDeleteStep())
.next(termCodeSystemDeleteStep())
.build();
}

@Bean
public JobParametersValidator termCodeSystemDeleteJobParameterValidator() {
return new TermCodeSystemDeleteJobParameterValidator();
}

/**
 * This step deletes the TermConceptParentChildLink(s), TermConceptProperty(es) and TermConceptDesignation(s)
 * related to the TermConcept(s) of the TermCodeSystemVersion being deleted
 */
@Bean(name = TERM_CONCEPT_RELATIONS_DELETE_STEP_NAME)
public Step termConceptRelationsDeleteStep() {
return myStepBuilderFactory.get(TERM_CONCEPT_RELATIONS_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemVersionDeleteReader())
.writer(batchConceptRelationsDeleteWriter())
.build();
}

/**
 * This step deletes the TermConcept(s) of the TermCodeSystemVersion being deleted
 */
@Bean(name = TERM_CONCEPTS_DELETE_STEP_NAME)
public Step termConceptsDeleteStep() {
DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
attribute.setTimeout(TERM_CONCEPT_DELETE_TIMEOUT);

return myStepBuilderFactory.get(TERM_CONCEPTS_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemVersionDeleteReader())
.writer(batchTermConceptsDeleteWriter())
.transactionAttribute(attribute)
.build();
}

/**
 * This step deletes the TermCodeSystemVersion
 */
@Bean(name = TERM_CODE_SYSTEM_VERSION_DELETE_STEP_NAME)
public Step termCodeSystemVersionDeleteStep() {
return myStepBuilderFactory.get(TERM_CODE_SYSTEM_VERSION_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemVersionDeleteReader())
.writer(batchTermCodeSystemVersionDeleteWriter())
.build();
}

@Bean(name = TERM_CODE_SYSTEM_DELETE_STEP_NAME)
public Step termCodeSystemDeleteStep() {
return myStepBuilderFactory.get(TERM_CODE_SYSTEM_DELETE_STEP_NAME)
.tasklet(termCodeSystemDeleteTasklet())
.build();
}

@Bean
@StepScope
public BatchTermCodeSystemVersionDeleteReader batchTermCodeSystemVersionDeleteReader() {
return new BatchTermCodeSystemVersionDeleteReader();
}

@Bean
public TermCodeSystemDeleteTasklet termCodeSystemDeleteTasklet() {
return new TermCodeSystemDeleteTasklet();
}

}

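Reviewer note: a usage sketch for the job defined above, mirroring how TermDeferredStorageSvcImpl submits it (illustrative only, not part of this commit; the class and method names here are hypothetical). The validator requires a positive termCodeSystemPid parameter.

import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

import java.util.Collections;

public class LaunchTermCodeSystemDeleteExample {

	@Autowired
	private IBatchJobSubmitter myJobSubmitter;

	@Autowired
	@Qualifier("termCodeSystemDeleteJob")
	private Job myTermCodeSystemDeleteJob;

	public JobExecution deleteCodeSystem(long theCodeSystemPid) throws JobParametersInvalidException {
		// "termCodeSystemPid" is the value of BatchConstants.JOB_PARAM_CODE_SYSTEM_ID
		JobParameters params = new JobParameters(Collections.singletonMap(
			"termCodeSystemPid", new JobParameter(theCodeSystemPid, true)));
		return myJobSubmitter.runJob(myTermCodeSystemDeleteJob, params);
	}
}
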
@@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.term.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;

/**
 * Validates that a TermCodeSystem parameter is present
 */
public class TermCodeSystemDeleteJobParameterValidator implements JobParametersValidator {

@Override
public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
if (theJobParameters == null) {
throw new JobParametersInvalidException("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_ID + "'");
}

if ( ! theJobParameters.getParameters().containsKey(JOB_PARAM_CODE_SYSTEM_ID)) {
throw new JobParametersInvalidException("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_ID + "'");
}

Long termCodeSystemPid = theJobParameters.getLong(JOB_PARAM_CODE_SYSTEM_ID);
if (termCodeSystemPid == null) {
throw new JobParametersInvalidException("'" + JOB_PARAM_CODE_SYSTEM_ID + "' parameter is null");
}

if (termCodeSystemPid <= 0) {
throw new JobParametersInvalidException("Invalid parameter '" + JOB_PARAM_CODE_SYSTEM_ID + "' value: " + termCodeSystemPid);
}
}
}

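Reviewer note: a hypothetical unit-test sketch (not part of this commit) showing the rejection path of the validator above when the required parameter is missing.

import org.junit.jupiter.api.Test;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;

import static org.junit.jupiter.api.Assertions.assertThrows;

class TermCodeSystemDeleteJobParameterValidatorExampleTest {

	private final TermCodeSystemDeleteJobParameterValidator mySvc = new TermCodeSystemDeleteJobParameterValidator();

	@Test
	void emptyParametersAreRejected() {
		// no "termCodeSystemPid" entry at all, so validate() must throw
		assertThrows(JobParametersInvalidException.class, () -> mySvc.validate(new JobParameters()));
	}
}
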
@@ -0,0 +1,59 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;

@Component
public class TermCodeSystemDeleteTasklet implements Tasklet {
private static final Logger ourLog = LoggerFactory.getLogger(TermCodeSystemDeleteTasklet.class);

@Autowired
private ITermCodeSystemDao myTermCodeSystemDao;

@Autowired
private ITermCodeSystemVersionDao myCodeSystemVersionDao;

@Override
public RepeatStatus execute(@NotNull StepContribution contribution, ChunkContext context) throws Exception {
long codeSystemPid = (Long) context.getStepContext().getJobParameters().get(JOB_PARAM_CODE_SYSTEM_ID);
ourLog.info("Deleting code system {}", codeSystemPid);

myTermCodeSystemDao.findById(codeSystemPid).orElseThrow(IllegalStateException::new);
myTermCodeSystemDao.deleteById(codeSystemPid);
ourLog.info("Code system {} deleted", codeSystemPid);

return RepeatStatus.FINISHED;
}

}

@@ -0,0 +1,105 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.transaction.interceptor.DefaultTransactionAttribute;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_UNIQUE_VERSION_DELETE_STEP_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CONCEPTS_UNIQUE_VERSION_DELETE_STEP_NAME;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CONCEPT_RELATIONS_UNIQUE_VERSION_DELETE_STEP_NAME;

/**
 * Configuration for batch job which deletes a specific TermCodeSystemVersion and its related
 * TermConceptProperty(es), TermConceptDesignation(s), and TermConceptParentChildLink(s)
 **/
@Configuration
public class TermCodeSystemVersionDeleteJobConfig extends BaseTermCodeSystemDeleteJobConfig {

@Bean(name = TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
@Lazy
public Job termCodeSystemVersionDeleteJob() {
return myJobBuilderFactory.get(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME)
.validator(termCodeSystemVersionDeleteJobParameterValidator())
.start(termConceptRelationsUniqueVersionDeleteStep())
.next(termConceptsUniqueVersionDeleteStep())
.next(termCodeSystemUniqueVersionDeleteStep())
.build();
}

@Bean
public JobParametersValidator termCodeSystemVersionDeleteJobParameterValidator() {
return new TermCodeSystemVersionDeleteJobParameterValidator();
}

@Bean(name = TERM_CONCEPT_RELATIONS_UNIQUE_VERSION_DELETE_STEP_NAME)
public Step termConceptRelationsUniqueVersionDeleteStep() {
return myStepBuilderFactory.get(TERM_CONCEPT_RELATIONS_UNIQUE_VERSION_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemUniqueVersionDeleteReader())
.writer(batchConceptRelationsDeleteWriter())
.build();
}

@Bean(name = TERM_CONCEPTS_UNIQUE_VERSION_DELETE_STEP_NAME)
public Step termConceptsUniqueVersionDeleteStep() {
DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
attribute.setTimeout(TERM_CONCEPT_DELETE_TIMEOUT);

return myStepBuilderFactory.get(TERM_CONCEPTS_UNIQUE_VERSION_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemUniqueVersionDeleteReader())
.writer(batchTermConceptsDeleteWriter())
.transactionAttribute(attribute)
.build();
}

@Bean(name = TERM_CODE_SYSTEM_UNIQUE_VERSION_DELETE_STEP_NAME)
public Step termCodeSystemUniqueVersionDeleteStep() {
return myStepBuilderFactory.get(TERM_CODE_SYSTEM_UNIQUE_VERSION_DELETE_STEP_NAME)
.<Long, Long>chunk(1)
.reader(batchTermCodeSystemUniqueVersionDeleteReader())
.writer(batchTermCodeSystemVersionDeleteWriter())
.build();
}

@Bean
@StepScope
public BatchTermCodeSystemUniqueVersionDeleteReader batchTermCodeSystemUniqueVersionDeleteReader() {
return new BatchTermCodeSystemUniqueVersionDeleteReader();
}

}

@ -0,0 +1,54 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_VERSION_ID;

/**
 * Validates that a TermCodeSystemVersion PID parameter is present and valid
 */
public class TermCodeSystemVersionDeleteJobParameterValidator implements JobParametersValidator {

    @Override
    public void validate(JobParameters theJobParameters) throws JobParametersInvalidException {
        if (theJobParameters == null) {
            throw new JobParametersInvalidException("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_VERSION_ID + "'");
        }

        if (!theJobParameters.getParameters().containsKey(JOB_PARAM_CODE_SYSTEM_VERSION_ID)) {
            throw new JobParametersInvalidException("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_VERSION_ID + "'");
        }

        Long termCodeSystemVersionPid = theJobParameters.getLong(JOB_PARAM_CODE_SYSTEM_VERSION_ID);
        if (termCodeSystemVersionPid == null) {
            throw new JobParametersInvalidException("'" + JOB_PARAM_CODE_SYSTEM_VERSION_ID + "' parameter is null");
        }

        if (termCodeSystemVersionPid <= 0) {
            throw new JobParametersInvalidException(
                "Invalid parameter '" + JOB_PARAM_CODE_SYSTEM_VERSION_ID + "' value: " + termCodeSystemVersionPid);
        }
    }
}

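The validator's contract can be illustrated with a small test. The following is a sketch assuming JUnit 5 is on the classpath; it is not a test shipped with this change.

// Sketch only: which JobParameters pass TermCodeSystemVersionDeleteJobParameterValidator.
import org.junit.jupiter.api.Test;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_VERSION_ID;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;

class TermCodeSystemVersionDeleteJobParameterValidatorSketchTest {

    private final TermCodeSystemVersionDeleteJobParameterValidator myValidator =
        new TermCodeSystemVersionDeleteJobParameterValidator();

    @Test
    void positivePidIsAccepted() {
        JobParameters params = new JobParametersBuilder()
            .addLong(JOB_PARAM_CODE_SYSTEM_VERSION_ID, 123L)
            .toJobParameters();
        assertDoesNotThrow(() -> myValidator.validate(params));
    }

    @Test
    void missingOrNonPositivePidIsRejected() {
        // no parameters at all
        assertThrows(JobParametersInvalidException.class, () -> myValidator.validate(new JobParameters()));

        // present but non-positive
        JobParameters zeroPid = new JobParametersBuilder()
            .addLong(JOB_PARAM_CODE_SYSTEM_VERSION_ID, 0L)
            .toJobParameters();
        assertThrows(JobParametersInvalidException.class, () -> myValidator.validate(zeroPid));
    }
}
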
@ -0,0 +1,62 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;

/**
 * Deletes the TermConcept(s) related to the TermCodeSystemVersion being deleted.
 * Executes in its own step so that it runs in its own transaction, because it is a DB-heavy operation.
 */
@Component
public class TermConceptDeleteTasklet implements Tasklet {
    private static final Logger ourLog = LoggerFactory.getLogger(TermConceptDeleteTasklet.class);

    @Autowired
    private ITermCodeSystemDao myTermCodeSystemDao;

    @Autowired
    private ITermCodeSystemVersionDao myCodeSystemVersionDao;

    @Override
    public RepeatStatus execute(@NotNull StepContribution contribution, ChunkContext context) throws Exception {
        long codeSystemPid = (Long) context.getStepContext().getJobParameters().get(JOB_PARAM_CODE_SYSTEM_ID);
        ourLog.info("Deleting code system {}", codeSystemPid);

        myTermCodeSystemDao.findById(codeSystemPid).orElseThrow(IllegalStateException::new);
        myTermCodeSystemDao.deleteById(codeSystemPid);

        return RepeatStatus.FINISHED;
    }

}

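How this tasklet is wired into the code-system delete job is not shown in this excerpt. The sketch below follows the same conventions as the step beans above (an injected myStepBuilderFactory and the step-name constants from BatchConstants); pairing the tasklet with TERM_CODE_SYSTEM_DELETE_STEP_NAME is an assumption, not taken from this commit.

    // Sketch only: register the tasklet as a single tasklet-based step.
    @Bean(name = TERM_CODE_SYSTEM_DELETE_STEP_NAME)
    public Step termCodeSystemDeleteStep(TermConceptDeleteTasklet theTermConceptDeleteTasklet) {
        return myStepBuilderFactory.get(TERM_CODE_SYSTEM_DELETE_STEP_NAME)
            .tasklet(theTermConceptDeleteTasklet)
            .build();
    }
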
@ -172,6 +172,4 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
        return requestValidator;
    }

}

@ -1,6 +1,8 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;

@ -70,4 +72,10 @@ public class TestJPAConfig {
    public BatchJobHelper batchJobHelper(JobExplorer theJobExplorer) {
        return new BatchJobHelper(theJobExplorer);
    }

    @Bean
    @Lazy
    public IBinaryStorageSvc binaryStorage() {
        return new MemoryBinaryStorageSvcImpl();
    }
}

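The remaining hunks in this commit update the terminology tests to wait for these jobs through the BatchJobHelper bean registered above. Condensed, the recurring pattern looks like the sketch below; the surrounding test class, DAO fields, and myCodeSystemId are assumed from the JPA test base classes and are illustrative only.

    // Illustrative pattern (condensed from the test changes that follow):
    // run deferred terminology storage, wait for the batch job, then assert on the DB state.
    @Autowired
    private BatchJobHelper myBatchJobHelper;

    @Test
    public void testDeleteCodeSystemWaitsForBatchJob() {
        myCodeSystemDao.delete(myCodeSystemId);

        myTerminologyDeferredStorageSvc.saveDeferred();
        myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);

        runInTransaction(() -> assertEquals(0, myTermCodeSystemDao.count()));
    }
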
@ -1,106 +0,0 @@
|
|||
package ca.uhn.fhir.jpa.dao;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.conformance.DateSearchTestCase;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.param.DateParam;
|
||||
import ca.uhn.fhir.util.FhirTerser;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.Arguments;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
/**
|
||||
* Run the tests defined in {@link DateSearchTestCase} in a DAO test as a @Nested suite.
|
||||
*/
|
||||
public abstract class BaseDAODateSearchTest {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(BaseDAODateSearchTest.class);
|
||||
|
||||
/** Id of test Observation */
|
||||
IIdType myObservationId;
|
||||
|
||||
/**
|
||||
* Test for our date search operators.
|
||||
* <p>
|
||||
* Be careful - date searching is defined by set relations over intervals, not a simple number comparison.
|
||||
* See http://hl7.org/fhir/search.html#prefix for details.
|
||||
* <p>
|
||||
*
|
||||
* @param theResourceDate the date to use as Observation effective date
|
||||
* @param theQuery the query parameter value including prefix (e.g. eq2020-01-01)
|
||||
* @param theExpectedMatch true if theQuery should match theResourceDate.
|
||||
*/
|
||||
@ParameterizedTest
|
||||
// use @CsvSource to debug individual cases.
|
||||
//@CsvSource("2019-12-31T08:00:00,eq2020,false,inline,1")
|
||||
@MethodSource("dateSearchCases")
|
||||
public void testDateSearchMatching(String theResourceDate, String theQuery, Boolean theExpectedMatch, String theFileName, int theLineNumber) {
|
||||
if (isShouldSkip(theResourceDate, theQuery)) {
|
||||
return;
|
||||
}
|
||||
// setup
|
||||
createObservationWithEffectiveDate(theResourceDate);
|
||||
|
||||
// run the query
|
||||
boolean matched = isSearchMatch(theQuery);
|
||||
|
||||
String message =
|
||||
"Expected " + theQuery + " to " +
|
||||
(theExpectedMatch ? "" : "not ") + "match " + theResourceDate +
|
||||
" (" + theFileName + ":" + theLineNumber + ")"; // wrap this in () so tools recognize the line reference.
|
||||
assertEquals(theExpectedMatch, matched, message);
|
||||
}
|
||||
|
||||
protected boolean isShouldSkip(String theResourceDate, String theQuery) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// we need these from the test container
|
||||
abstract protected FhirContext getMyFhirCtx();
|
||||
abstract protected <T> T doInTransaction(TransactionCallback<T> daoMethodOutcomeTransactionCallback);
|
||||
abstract protected <T extends IBaseResource> IFhirResourceDao<T> getObservationDao();
|
||||
|
||||
protected void createObservationWithEffectiveDate(String theResourceDate) {
|
||||
IBaseResource obs = getMyFhirCtx().getResourceDefinition("Observation").newInstance();
|
||||
FhirTerser fhirTerser = getMyFhirCtx().newTerser();
|
||||
fhirTerser.addElement(obs, "effectiveDateTime", theResourceDate);
|
||||
ourLog.info("obs {}", getMyFhirCtx().newJsonParser().encodeResourceToString(obs));
|
||||
|
||||
DaoMethodOutcome createOutcome = doInTransaction(s -> getObservationDao().create(obs));
|
||||
myObservationId = createOutcome.getId();
|
||||
}
|
||||
|
||||
/**
|
||||
* Does the query string match the observation created during setup?
|
||||
*/
|
||||
protected boolean isSearchMatch(String theQuery) {
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous();
|
||||
map.add(Observation.SP_DATE, new DateParam(theQuery));
|
||||
ourLog.info("Searching for observation {}", map);
|
||||
|
||||
IBundleProvider results = getObservationDao().search(map);
|
||||
|
||||
boolean matched = results.getAllResourceIds().contains(myObservationId.getIdPart());
|
||||
return matched;
|
||||
}
|
||||
|
||||
static List<Arguments> dateSearchCases() {
|
||||
return DateSearchTestCase.ourCases.stream()
|
||||
.map(DateSearchTestCase::toArguments)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
}
|
|
@ -1,21 +1,28 @@
|
|||
package ca.uhn.fhir.jpa.dao.dstu3;
|
||||
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.dstu3.model.CodeSystem;
|
||||
import org.hl7.fhir.dstu3.model.Enumerations;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
||||
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@AfterAll
|
||||
public static void afterClassClearContext() {
|
||||
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
|
||||
|
@ -64,6 +71,7 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
|||
cs.addConcept().setCode("B");
|
||||
myCodeSystemDao.update(cs, mySrd);
|
||||
myTerminologyDeferredStorageSvc.saveAllDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
runInTransaction(()->{
|
||||
assertEquals(2, myConceptDao.count());
|
||||
});
|
||||
|
@ -77,6 +85,7 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
|||
cs.addConcept().setCode("C");
|
||||
myCodeSystemDao.update(cs, mySrd);
|
||||
myTerminologyDeferredStorageSvc.saveAllDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
runInTransaction(()->{
|
||||
assertEquals(1, myConceptDao.count());
|
||||
});
|
||||
|
@ -86,6 +95,7 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
|
|||
myCodeSystemDao.delete(id);
|
||||
});
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
runInTransaction(()->{
|
||||
assertEquals(0L, myConceptDao.count());
|
||||
});
|
||||
|
|
|
@ -17,11 +17,9 @@ import org.hl7.fhir.r4.model.OperationOutcome;
|
|||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.hl7.fhir.r5.model.StructureDefinition;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.stubbing.Answer;
|
||||
import org.springframework.batch.core.BatchStatus;
|
||||
import org.springframework.batch.core.JobExecution;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
@ -32,7 +30,6 @@ import static org.hamcrest.MatcherAssert.assertThat;
|
|||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.mockito.ArgumentMatchers.nullable;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
class DeleteExpungeDaoTest extends BaseJpaR4Test {
|
||||
|
|
|
@ -13,6 +13,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
|||
import ca.uhn.fhir.util.BundleBuilder;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.AuditEvent;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
|
@ -29,6 +30,7 @@ import org.junit.jupiter.api.AfterEach;
|
|||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
|
@ -650,4 +652,52 @@ public class FhirResourceDaoCreatePlaceholdersR4Test extends BaseJpaR4Test {
|
|||
assertNotNull(retObservation);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleVersionedReferencesToAutocreatedPlaceholder() {
|
||||
myDaoConfig.setAutoCreatePlaceholderReferenceTargets(true);
|
||||
HashSet<String> refPaths = new HashSet<>();
|
||||
refPaths.add("Observation.subject");
|
||||
myModelConfig.setAutoVersionReferenceAtPaths(refPaths);
|
||||
|
||||
|
||||
Observation obs1 = new Observation();
|
||||
obs1.setId("Observation/DEF1");
|
||||
Reference patientRef = new Reference("Patient/RED");
|
||||
obs1.setSubject(patientRef);
|
||||
BundleBuilder builder = new BundleBuilder(myFhirCtx);
|
||||
Observation obs2 = new Observation();
|
||||
obs2.setId("Observation/DEF2");
|
||||
obs2.setSubject(patientRef);
|
||||
builder.addTransactionUpdateEntry(obs1);
|
||||
builder.addTransactionUpdateEntry(obs2);
|
||||
|
||||
mySystemDao.transaction(new SystemRequestDetails(), (Bundle) builder.getBundle());
|
||||
|
||||
// verify links created to Patient placeholder from both Observations
|
||||
IBundleProvider outcome = myPatientDao.search(SearchParameterMap.newSynchronous().addRevInclude(IBaseResource.INCLUDE_ALL));
|
||||
assertEquals(3, outcome.getAllResources().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMultipleReferencesToAutocreatedPlaceholder() {
|
||||
myDaoConfig.setAutoCreatePlaceholderReferenceTargets(true);
|
||||
|
||||
Observation obs1 = new Observation();
|
||||
obs1.setId("Observation/DEF1");
|
||||
Reference patientRef = new Reference("Patient/RED");
|
||||
obs1.setSubject(patientRef);
|
||||
BundleBuilder builder = new BundleBuilder(myFhirCtx);
|
||||
Observation obs2 = new Observation();
|
||||
obs2.setId("Observation/DEF2");
|
||||
obs2.setSubject(patientRef);
|
||||
builder.addTransactionUpdateEntry(obs1);
|
||||
builder.addTransactionUpdateEntry(obs2);
|
||||
|
||||
mySystemDao.transaction(new SystemRequestDetails(), (Bundle) builder.getBundle());
|
||||
|
||||
// verify links created to Patient placeholder from both Observations
|
||||
IBundleProvider outcome = myPatientDao.search(SearchParameterMap.newSynchronous().addRevInclude(IBaseResource.INCLUDE_ALL));
|
||||
assertEquals(3, outcome.getAllResources().size());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -2,15 +2,19 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
|
@ -18,6 +22,8 @@ import static org.junit.jupiter.api.Assertions.assertNull;
|
|||
|
||||
public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
||||
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@Test
|
||||
public void testIndexContained() throws Exception {
|
||||
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(true);
|
||||
|
@ -58,6 +64,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
runInTransaction(() -> {
|
||||
assertEquals(0, myTermCodeSystemDao.count());
|
||||
assertEquals(0, myTermCodeSystemVersionDao.count());
|
||||
|
@ -116,6 +123,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
|
||||
// Entities for first resource should be gone now.
|
||||
runInTransaction(() -> {
|
||||
|
@ -150,6 +158,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
|
||||
// The remaining versions and Code System entities should be gone now.
|
||||
runInTransaction(() -> {
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseDAODateSearchTest;
|
||||
import ca.uhn.fhir.jpa.dao.BaseDateSearchDaoTests;
|
||||
import ca.uhn.fhir.jpa.entity.Search;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
|
@ -5316,8 +5314,7 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Nested
|
||||
public class DateSearchTests extends BaseDAODateSearchTest {
|
||||
|
||||
public class DateSearchTests extends BaseDateSearchDaoTests {
|
||||
/**
|
||||
* legacy builder didn't get the year/month date search fixes, so skip anything wider than a day.
|
||||
*/
|
||||
|
@ -5328,16 +5325,8 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected FhirContext getMyFhirCtx() {
|
||||
return myFhirCtx;
|
||||
}
|
||||
@Override
|
||||
protected <T> T doInTransaction(TransactionCallback<T> theCallback) {
|
||||
return new TransactionTemplate(myTxManager).execute(theCallback);
|
||||
}
|
||||
@Override
|
||||
protected IFhirResourceDao<Observation> getObservationDao() {
|
||||
return myObservationDao;
|
||||
protected Fixture getFixture() {
|
||||
return new TestDataBuilderFixture(FhirResourceDaoR4LegacySearchBuilderTest.this, myObservationDao);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,10 +1,13 @@
|
|||
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.config.TestR4WithLuceneDisabledConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseDAODateSearchTest;
|
||||
import ca.uhn.fhir.jpa.dao.BaseDateSearchDaoTests;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.dao.DaoTestDataBuilder;
|
||||
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
|
@ -16,10 +19,6 @@ import org.springframework.test.annotation.DirtiesContext;
|
|||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ContextConfiguration(classes = {TestR4WithLuceneDisabledConfig.class})
|
||||
|
@ -33,6 +32,8 @@ public class FhirResourceDaoR4LuceneDisabledStandardQueries extends BaseJpaTest
|
|||
@Autowired
|
||||
@Qualifier("myObservationDaoR4")
|
||||
IFhirResourceDao<Observation> myObservationDao;
|
||||
@Autowired
|
||||
protected DaoRegistry myDaoRegistry;
|
||||
|
||||
@Override
|
||||
protected PlatformTransactionManager getTxManager() {
|
||||
|
@ -45,22 +46,11 @@ public class FhirResourceDaoR4LuceneDisabledStandardQueries extends BaseJpaTest
|
|||
}
|
||||
|
||||
@Nested
|
||||
public class DateSearchTests extends BaseDAODateSearchTest {
|
||||
public class DateSearchTests extends BaseDateSearchDaoTests {
|
||||
@Override
|
||||
protected FhirContext getMyFhirCtx() {
|
||||
return myFhirCtx;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> T doInTransaction(TransactionCallback<T> theCallback) {
|
||||
return new TransactionTemplate(myTxManager).execute(
|
||||
theCallback
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected IFhirResourceDao<Observation> getObservationDao() {
|
||||
return myObservationDao;
|
||||
protected Fixture getFixture() {
|
||||
DaoTestDataBuilder testDataBuilder = new DaoTestDataBuilder(myFhirCtx, myDaoRegistry, new SystemRequestDetails());
|
||||
return new TestDataBuilderFixture<>(testDataBuilder, myObservationDao);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -45,6 +45,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
|||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
@ -109,6 +110,7 @@ import org.junit.jupiter.api.AfterEach;
|
|||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
|
@ -128,6 +130,7 @@ import java.util.concurrent.ExecutorService;
|
|||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.apache.commons.lang3.StringUtils.countMatches;
|
||||
import static org.apache.commons.lang3.StringUtils.defaultString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
@ -155,6 +158,9 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
|
|||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4Test.class);
|
||||
|
||||
@Autowired
|
||||
private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@AfterEach
|
||||
public final void after() {
|
||||
myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
|
||||
|
@ -356,6 +362,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
|
|||
myResourceReindexingSvc.markAllResourcesForReindexing();
|
||||
myResourceReindexingSvc.forceReindexingPass();
|
||||
myTerminologyDeferredStorageSvc.saveAllDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(3L, myTermConceptDao.count());
|
||||
|
|
|
@ -120,7 +120,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
|
|||
myValidationSettings.setLocalReferenceValidationDefaultPolicy(IResourceValidator.ReferenceValidationPolicy.IGNORE);
|
||||
myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
|
||||
|
||||
myUnknownCodeSystemWarningValidationSupport.setAllowNonExistentCodeSystem(UnknownCodeSystemWarningValidationSupport.ALLOW_NON_EXISTENT_CODE_SYSTEM_DEFAULT);
|
||||
myUnknownCodeSystemWarningValidationSupport.setNonExistentCodeSystemSeverity(UnknownCodeSystemWarningValidationSupport.DEFAULT_SEVERITY);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -161,12 +161,53 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
|
|||
|
||||
}
|
||||
|
||||
private Observation createObservationForUnknownCodeSystemTest() {
|
||||
Observation obs = new Observation();
|
||||
obs.getMeta().addProfile("http://sd");
|
||||
obs.getText().setDivAsString("<div>Hello</div>");
|
||||
obs.getText().setStatus(Narrative.NarrativeStatus.GENERATED);
|
||||
obs.getCategoryFirstRep().addCoding().setSystem("http://terminology.hl7.org/CodeSystem/observation-category").setCode("vital-signs");
|
||||
obs.getCode().setText("hello");
|
||||
obs.setSubject(new Reference("Patient/123"));
|
||||
obs.addPerformer(new Reference("Practitioner/123"));
|
||||
obs.setEffective(DateTimeType.now());
|
||||
obs.setStatus(ObservationStatus.FINAL);
|
||||
return obs;
|
||||
}
|
||||
|
||||
/**
|
||||
* By default an unknown code system should fail vaildation
|
||||
* By default, an unknown code system should fail validation
|
||||
*/
|
||||
@Test
|
||||
public void testValidateCodeInValueSetWithUnknownCodeSystem_Warning() {
|
||||
myUnknownCodeSystemWarningValidationSupport.setAllowNonExistentCodeSystem(true);
|
||||
// set to warning
|
||||
myUnknownCodeSystemWarningValidationSupport.setNonExistentCodeSystemSeverity(IValidationSupport.IssueSeverity.WARNING);
|
||||
|
||||
createStructureDefWithBindingToUnknownCs();
|
||||
|
||||
Observation obs = createObservationForUnknownCodeSystemTest();
|
||||
|
||||
OperationOutcome oo;
|
||||
String encoded;
|
||||
|
||||
// Valid code
|
||||
obs.setValue(new Quantity().setSystem("http://cs").setCode("code1").setValue(123));
|
||||
oo = validateAndReturnOutcome(obs);
|
||||
encoded = encode(oo);
|
||||
ourLog.info(encoded);
|
||||
assertTrue(oo.getIssueFirstRep().getDiagnostics().contains("No issues detected during validation"));
|
||||
|
||||
// Invalid code
|
||||
obs.setValue(new Quantity().setSystem("http://cs").setCode("code99").setValue(123));
|
||||
oo = validateAndReturnOutcome(obs);
|
||||
encoded = encode(oo);
|
||||
ourLog.info(encoded);
|
||||
assertTrue(oo.getIssueFirstRep().getDiagnostics().contains("No issues detected during validation"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidateCodeInValueSetWithUnknownCodeSystem_Error() {
|
||||
myUnknownCodeSystemWarningValidationSupport.setNonExistentCodeSystemSeverity(IValidationSupport.IssueSeverity.ERROR);
|
||||
|
||||
createStructureDefWithBindingToUnknownCs();
|
||||
|
||||
|
@ -189,15 +230,17 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
|
|||
oo = validateAndReturnOutcome(obs);
|
||||
encoded = encode(oo);
|
||||
ourLog.info(encoded);
|
||||
assertEquals("No issues detected during validation", oo.getIssueFirstRep().getDiagnostics(), encoded);
|
||||
assertTrue(oo.getIssueFirstRep().getDiagnostics().contains("No issues detected during validation"));
|
||||
|
||||
|
||||
// Invalid code
|
||||
obs.setValue(new Quantity().setSystem("http://cs").setCode("code99").setValue(123));
|
||||
oo = validateAndReturnOutcome(obs);
|
||||
encoded = encode(oo);
|
||||
ourLog.info(encoded);
|
||||
assertEquals("No issues detected during validation", oo.getIssueFirstRep().getDiagnostics(), encoded);
|
||||
|
||||
assertTrue(oo.getIssueFirstRep()
|
||||
.getDiagnostics().contains("The code provided (http://cs#code99) is not in the value set http://vs, and a code from this value set is required: Unknown code 'http://cs#code99' for in-memory expansion of ValueSet 'http://vs'")
|
||||
);
|
||||
}
|
||||
|
||||
public void createStructureDefWithBindingToUnknownCs() {
|
||||
|
|
|
@ -2,22 +2,25 @@ package ca.uhn.fhir.jpa.dao.r5;
|
|||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r5.model.CodeSystem;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
|
||||
public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {
|
||||
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
@Test
|
||||
public void testDeleteLargeCompleteCodeSystem() {
|
||||
|
||||
|
@ -42,6 +45,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTermDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
runInTransaction(() -> {
|
||||
assertEquals(0, myTermCodeSystemDao.count());
|
||||
assertEquals(0, myTermCodeSystemVersionDao.count());
|
||||
|
@ -100,6 +104,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTermDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
|
||||
// Entities for first resource should be gone now.
|
||||
runInTransaction(() -> {
|
||||
|
@ -134,6 +139,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {
|
|||
|
||||
// Now the background scheduler will do its thing
|
||||
myTermDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
|
||||
// The remaining versions and Code System entities should be gone now.
|
||||
runInTransaction(() -> {
|
||||
|
|
|
@ -270,6 +270,78 @@ public class NpmR4Test extends BaseJpaR4Test {
|
|||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInstallR4PackageWithExternalizedBinaries() throws Exception {
|
||||
myDaoConfig.setAllowExternalReferences(true);
|
||||
|
||||
myInterceptorService.registerInterceptor(myBinaryStorageInterceptor);
|
||||
byte[] bytes = loadClasspathBytes("/packages/hl7.fhir.uv.shorthand-0.12.0.tgz");
|
||||
myFakeNpmServlet.myResponses.put("/hl7.fhir.uv.shorthand/0.12.0", bytes);
|
||||
|
||||
PackageInstallationSpec spec = new PackageInstallationSpec().setName("hl7.fhir.uv.shorthand").setVersion("0.12.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
|
||||
PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec);
|
||||
assertEquals(1, outcome.getResourcesInstalled().get("CodeSystem"));
|
||||
|
||||
// Be sure no further communication with the server
|
||||
JettyUtil.closeServer(myServer);
|
||||
|
||||
// Make sure we can fetch the package by ID and Version
|
||||
NpmPackage pkg = myPackageCacheManager.loadPackage("hl7.fhir.uv.shorthand", "0.12.0");
|
||||
assertEquals("Describes FHIR Shorthand (FSH), a domain-specific language (DSL) for defining the content of FHIR Implementation Guides (IG). (built Wed, Apr 1, 2020 17:24+0000+00:00)", pkg.description());
|
||||
|
||||
// Make sure we can fetch the package by ID
|
||||
pkg = myPackageCacheManager.loadPackage("hl7.fhir.uv.shorthand", null);
|
||||
assertEquals("0.12.0", pkg.version());
|
||||
assertEquals("Describes FHIR Shorthand (FSH), a domain-specific language (DSL) for defining the content of FHIR Implementation Guides (IG). (built Wed, Apr 1, 2020 17:24+0000+00:00)", pkg.description());
|
||||
|
||||
// Make sure DB rows were saved
|
||||
runInTransaction(() -> {
|
||||
NpmPackageEntity pkgEntity = myPackageDao.findByPackageId("hl7.fhir.uv.shorthand").orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals("hl7.fhir.uv.shorthand", pkgEntity.getPackageId());
|
||||
|
||||
NpmPackageVersionEntity versionEntity = myPackageVersionDao.findByPackageIdAndVersion("hl7.fhir.uv.shorthand", "0.12.0").orElseThrow(() -> new IllegalArgumentException());
|
||||
assertEquals("hl7.fhir.uv.shorthand", versionEntity.getPackageId());
|
||||
assertEquals("0.12.0", versionEntity.getVersionId());
|
||||
assertEquals(3001, versionEntity.getPackageSizeBytes());
|
||||
assertEquals(true, versionEntity.isCurrentVersion());
|
||||
assertEquals("hl7.fhir.uv.shorthand", versionEntity.getPackageId());
|
||||
assertEquals("4.0.1", versionEntity.getFhirVersionId());
|
||||
assertEquals(FhirVersionEnum.R4, versionEntity.getFhirVersion());
|
||||
|
||||
NpmPackageVersionResourceEntity resource = myPackageVersionResourceDao.findCurrentVersionByCanonicalUrl(Pageable.unpaged(), FhirVersionEnum.R4, "http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand").getContent().get(0);
|
||||
assertEquals("http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand", resource.getCanonicalUrl());
|
||||
assertEquals("0.12.0", resource.getCanonicalVersion());
|
||||
assertEquals("ImplementationGuide-hl7.fhir.uv.shorthand.json", resource.getFilename());
|
||||
assertEquals("4.0.1", resource.getFhirVersionId());
|
||||
assertEquals(FhirVersionEnum.R4, resource.getFhirVersion());
|
||||
assertEquals(6155, resource.getResSizeBytes());
|
||||
});
|
||||
|
||||
// Fetch resource by URL
|
||||
runInTransaction(() -> {
|
||||
IBaseResource asset = myPackageCacheManager.loadPackageAssetByUrl(FhirVersionEnum.R4, "http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand");
|
||||
assertThat(myFhirCtx.newJsonParser().encodeResourceToString(asset), containsString("\"url\":\"http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand\",\"version\":\"0.12.0\""));
|
||||
});
|
||||
|
||||
// Fetch resource by URL with version
|
||||
runInTransaction(() -> {
|
||||
IBaseResource asset = myPackageCacheManager.loadPackageAssetByUrl(FhirVersionEnum.R4, "http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand|0.12.0");
|
||||
assertThat(myFhirCtx.newJsonParser().encodeResourceToString(asset), containsString("\"url\":\"http://hl7.org/fhir/uv/shorthand/ImplementationGuide/hl7.fhir.uv.shorthand\",\"version\":\"0.12.0\""));
|
||||
});
|
||||
|
||||
// Search for the installed resource
|
||||
runInTransaction(() -> {
|
||||
SearchParameterMap map = SearchParameterMap.newSynchronous();
|
||||
map.add(StructureDefinition.SP_URL, new UriParam("http://hl7.org/fhir/uv/shorthand/CodeSystem/shorthand-code-system"));
|
||||
IBundleProvider result = myCodeSystemDao.search(map);
|
||||
assertEquals(1, result.sizeOrThrowNpe());
|
||||
IBaseResource resource = result.getResources(0, 1).get(0);
|
||||
assertEquals("CodeSystem/shorthand-code-system/_history/1", resource.getIdElement().toString());
|
||||
});
|
||||
|
||||
myInterceptorService.unregisterInterceptor(myBinaryStorageInterceptor);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNumericIdsInstalledWithNpmPrefix() throws Exception {
|
||||
myDaoConfig.setAllowExternalReferences(true);
|
||||
|
|
|
@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest;
|
|||
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.dstu3.model.BooleanType;
|
||||
import org.hl7.fhir.dstu3.model.CodeSystem;
|
||||
import org.hl7.fhir.dstu3.model.CodeType;
|
||||
|
@ -19,16 +20,20 @@ import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
|
|||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class ResourceProviderDstu3CodeSystemTest extends BaseResourceProviderDstu3Test {
|
||||
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderDstu3CodeSystemTest.class);
|
||||
public static FhirContext ourCtx = FhirContext.forDstu3Cached();
|
||||
|
||||
|
@ -133,6 +138,8 @@ public class ResourceProviderDstu3CodeSystemTest extends BaseResourceProviderDst
|
|||
runInTransaction(() -> assertEquals(26L, myConceptDao.count()));
|
||||
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
|
||||
|
||||
runInTransaction(() -> assertEquals(24L, myConceptDao.count()));
|
||||
|
||||
}
|
||||
|
|
|
@ -5,9 +5,15 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
|||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
|
||||
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
|
||||
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
|
||||
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
|
@ -15,8 +21,9 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
|
|||
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.util.HapiExtensions;
|
||||
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.BooleanType;
|
||||
|
@ -26,8 +33,10 @@ import org.hl7.fhir.r4.model.DecimalType;
|
|||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.Organization;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Quantity;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.hl7.fhir.r4.model.SearchParameter;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
|
@ -36,6 +45,8 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
|
||||
import static org.awaitility.Awaitility.await;
|
||||
|
@ -43,6 +54,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
|
|||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
|
@ -63,8 +76,10 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
|||
@AfterEach
|
||||
public void afterDisableExpunge() {
|
||||
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
}
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
|
||||
|
||||
ourRestServer.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof CascadingDeleteInterceptor);
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEnableExpunge() {
|
||||
|
@ -187,6 +202,62 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
|||
return dao;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteCascade() throws IOException {
|
||||
ourRestServer.registerInterceptor(new CascadingDeleteInterceptor(myFhirCtx, myDaoRegistry, myInterceptorRegistry));
|
||||
|
||||
// setup
|
||||
Organization organization = new Organization();
|
||||
organization.setName("FOO");
|
||||
IIdType organizationId = myOrganizationDao.create(organization).getId().toUnqualifiedVersionless();
|
||||
|
||||
Organization organization2 = new Organization();
|
||||
organization2.setName("FOO2");
|
||||
organization2.getPartOf().setReference(organizationId.getValue());
|
||||
IIdType organizationId2 = myOrganizationDao.create(organization2).getId().toUnqualifiedVersionless();
|
||||
|
||||
Patient patient = new Patient();
|
||||
patient.setManagingOrganization(new Reference(organizationId2));
|
||||
IIdType patientId = myPatientDao.create(patient).getId().toUnqualifiedVersionless();
|
||||
|
||||
// execute
|
||||
String url = ourServerBase + "/Organization/" + organizationId.getIdPart() + "?" +
|
||||
Constants.PARAMETER_CASCADE_DELETE + "=" + Constants.CASCADE_DELETE
|
||||
+ "&" +
|
||||
JpaConstants.PARAM_DELETE_EXPUNGE + "=true"
|
||||
;
|
||||
HttpDelete delete = new HttpDelete(url);
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
|
||||
String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
ourLog.info("Response:\n{}", responseString);
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
}
|
||||
|
||||
runInTransaction(() -> {
|
||||
ResourceTable res;
|
||||
List<ResourceHistoryTable> versions;
|
||||
|
||||
res = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow(() -> new IllegalStateException());
|
||||
assertNotNull(res.getDeleted());
|
||||
versions = myResourceHistoryTableDao.findAllVersionsForResourceIdInOrder(patientId.getIdPartAsLong());
|
||||
assertEquals(2, versions.size());
|
||||
assertEquals(1L, versions.get(0).getVersion());
|
||||
assertNull(versions.get(0).getDeleted());
|
||||
assertEquals(2L, versions.get(1).getVersion());
|
||||
assertNotNull(versions.get(1).getDeleted());
|
||||
|
||||
res = myResourceTableDao.findById(organizationId.getIdPartAsLong()).orElseThrow(() -> new IllegalStateException());
|
||||
assertNotNull(res.getDeleted());
|
||||
versions = myResourceHistoryTableDao.findAllVersionsForResourceIdInOrder(organizationId.getIdPartAsLong());
|
||||
assertEquals(2, versions.size());
|
||||
assertEquals(1L, versions.get(0).getVersion());
|
||||
assertNull(versions.get(0).getDeleted());
|
||||
assertEquals(2L, versions.get(1).getVersion());
|
||||
assertNotNull(versions.get(1).getDeleted());
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testExpungeInstanceOldVersionsAndDeleted() {
|
||||
createStandardPatients();
|
||||
|
@ -422,7 +493,7 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
|||
assertExpunged(myTwoVersionObservationId.withVersion("2"));
|
||||
assertExpunged(myDeletedObservationId);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testExpungeSystemEverythingWithNormalizedQuantityStorageSupported() {
|
||||
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_STORAGE_SUPPORTED);
|
||||
|
@ -444,7 +515,7 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
|
|||
assertExpunged(myTwoVersionObservationId.withVersion("2"));
|
||||
assertExpunged(myDeletedObservationId);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testExpungeTypeOldVersionsAndDeleted() {
|
||||
createStandardPatients();
|
||||
|
|
|
@ -340,6 +340,11 @@ public class GiantTransactionPerfTest {
|
|||
private class MockResourceHistoryTableDao implements IResourceHistoryTableDao {
|
||||
private int mySaveCount;
|
||||
|
||||
@Override
|
||||
public List<ResourceHistoryTable> findAllVersionsForResourceIdInOrder(Long theId) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResourceHistoryTable findForIdAndVersionAndFetchProvenance(long theId, long theVersion) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
|
|
@ -5,12 +5,15 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.hl7.fhir.r4.model.CodeType;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
|
@ -18,6 +21,10 @@ public class TermCodeSystemStorageSvcTest extends BaseJpaR4Test {
|
|||
|
||||
public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
|
||||
|
||||
@Autowired
|
||||
private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
|
||||
@Test
|
||||
public void testStoreNewCodeSystemVersionForExistingCodeSystemNoVersionId() {
|
||||
CodeSystem firstUpload = createCodeSystemWithMoreThan100Concepts();
|
||||
|
@ -126,6 +133,7 @@ public class TermCodeSystemStorageSvcTest extends BaseJpaR4Test {
|
|||
myTerminologyDeferredStorageSvc.setProcessDeferred(true);
|
||||
myTerminologyDeferredStorageSvc.saveDeferred();
|
||||
myTerminologyDeferredStorageSvc.setProcessDeferred(false);
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(false, TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
|
||||
assertEquals(theExpectedConceptCount, runInTransaction(() -> myTermConceptDao.count()));
|
||||
|
||||
}
|
||||
|
|
|
@ -10,10 +10,15 @@ import org.junit.jupiter.api.Test;
|
|||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.batch.core.JobExecution;
|
||||
import org.springframework.test.util.ReflectionTestUtils;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.anyLong;
|
||||
import static org.mockito.ArgumentMatchers.same;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
@ -33,6 +38,9 @@ public class TermDeferredStorageSvcImplTest {
|
|||
@Mock
|
||||
private ITermCodeSystemVersionDao myTermCodeSystemVersionDao;
|
||||
|
||||
@Mock
|
||||
private JobExecution myJobExecution;
|
||||
|
||||
@Test
|
||||
public void testSaveDeferredWithExecutionSuspended() {
|
||||
TermDeferredStorageSvcImpl svc = new TermDeferredStorageSvcImpl();
|
||||
|
@ -41,6 +49,17 @@ public class TermDeferredStorageSvcImplTest {
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testStorageNotEmptyWhileJobsExecuting() {
|
||||
TermDeferredStorageSvcImpl svc = new TermDeferredStorageSvcImpl();
|
||||
ReflectionTestUtils.setField(svc, "myCurrentJobExecutions", Collections.singletonList(myJobExecution));
|
||||
|
||||
when(myJobExecution.isRunning()).thenReturn(true, false);
|
||||
assertFalse(svc.isStorageQueueEmpty());
|
||||
assertTrue(svc.isStorageQueueEmpty());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSaveDeferred_Concept() {
|
||||
TermConcept concept = new TermConcept();
|
||||
|
|
|
@ -5,20 +5,21 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
|||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import org.hl7.fhir.r4.model.CodeSystem;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY_DEFAULT;
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test {
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
private TermLoaderSvcImpl mySvc;
|
||||
|
||||
private ZipCollectionBuilder myFiles;
|
||||
|
||||
@BeforeEach
|
||||
|
@ -62,6 +63,7 @@ public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test {
|
|||
|
||||
mySvc.loadLoinc(myFiles.getFiles(), mySrd);
|
||||
myTerminologyDeferredStorageSvc.saveAllDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(false, TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME );
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(1, myTermCodeSystemDao.count());
|
||||
|
@ -87,6 +89,7 @@ public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test {
|
|||
TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesWithPropertiesFileToZip(myFiles, "v268_loincupload.properties");
|
||||
mySvc.loadLoinc(myFiles.getFiles(), mySrd);
|
||||
myTerminologyDeferredStorageSvc.saveAllDeferred();
|
||||
myBatchJobHelper.awaitAllBulkJobCompletions(false, TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME );
|
||||
|
||||
runInTransaction(() -> {
|
||||
assertEquals(1, myTermCodeSystemDao.count());
|
||||
|
|
|
@ -5,6 +5,7 @@ import ca.uhn.fhir.context.support.ValidationSupportContext;
|
|||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
|
||||
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
|
||||
import ca.uhn.fhir.jpa.entity.TermConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermValueSet;
|
||||
|
@ -14,6 +15,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
|||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.param.UriParam;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.test.utilities.BatchJobHelper;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
@ -50,6 +52,7 @@ import java.util.Properties;
|
|||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_DUPLICATE_FILE_DEFAULT;
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE_DEFAULT;
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_DUPLICATE_FILE_DEFAULT;
|
||||
|
@ -75,6 +78,7 @@ import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000
|
|||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT;
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_PROPERTIES_FILE;
|
||||
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_XML_FILE;
|
||||
import static java.util.stream.Collectors.joining;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_ALL_VALUESET_ID;
|
||||
|
@ -130,9 +134,11 @@ public class TerminologySvcImplCurrentVersionR4Test extends BaseJpaR4Test {
|
|||
@Autowired
|
||||
private ITermReadSvc myITermReadSvc;
|
||||
|
||||
@Autowired
|
||||
@Qualifier(BaseConfig.JPA_VALIDATION_SUPPORT)
|
||||
private IValidationSupport myJpaPersistedResourceValidationSupport;
|
||||
@Autowired @Qualifier(BaseConfig.JPA_VALIDATION_SUPPORT)
|
||||
private IValidationSupport myJpaPersistedResourceValidationSupport;
|
||||
|
||||
@Autowired private BatchJobHelper myBatchJobHelper;
|
||||
|
||||
|
||||
private ZipCollectionBuilder myFiles;
|
||||
private ServletRequestDetails myRequestDetails = new ServletRequestDetails();
|
||||
|
@@ -690,6 +696,7 @@ public class TerminologySvcImplCurrentVersionR4Test extends BaseJpaR4Test {

      String currentVer = "2.68";
      uploadLoincCodeSystem(currentVer, true);
      myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);

      runCommonValidations(Lists.newArrayList(nonCurrentVer, currentVer));

@@ -711,6 +718,7 @@ public class TerminologySvcImplCurrentVersionR4Test extends BaseJpaR4Test {

      String lastCurrentVer = "2.69";
      uploadLoincCodeSystem(lastCurrentVer, true);
      myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);

      runCommonValidations(Lists.newArrayList(firstCurrentVer, noCurrentVer, lastCurrentVer));

@@ -775,11 +783,27 @@ public class TerminologySvcImplCurrentVersionR4Test extends BaseJpaR4Test {
   }

   private TermCodeSystemVersion fetchCurrentCodeSystemVersion() {
      runInTransaction(() -> {
         List<TermCodeSystem> tcsList = myEntityManager.createQuery("from TermCodeSystem").getResultList();
         List<TermCodeSystemVersion> tcsvList = myEntityManager.createQuery("from TermCodeSystemVersion").getResultList();
         ourLog.error("tcslist: {}", tcsList.stream().map(tcs -> tcs.toString()).collect(joining("\n", "\n", "")));
         ourLog.error("tcsvlist: {}", tcsvList.stream().map(v -> v.toString()).collect(joining("\n", "\n", "")));

         if (tcsList.size() != 1) {
            throw new IllegalStateException("More than one TCS: " +
               tcsList.stream().map(tcs -> String.valueOf(tcs.getPid())).collect(joining()));
         }
         if (tcsList.get(0).getCurrentVersion() == null) {
            throw new IllegalStateException("Current version is null in TCS: " + tcsList.get(0).getPid());
         }
      });

      return runInTransaction(() -> (TermCodeSystemVersion) myEntityManager.createQuery(
         "select tcsv from TermCodeSystemVersion tcsv join fetch tcsv.myCodeSystem tcs " +
            "where tcs.myCurrentVersion = tcsv").getSingleResult());
         "select tcsv from TermCodeSystemVersion tcsv join fetch tcsv.myCodeSystem tcs " +
            "where tcs.myCurrentVersion = tcsv").getSingleResult());
   }


   private static void addBaseLoincMandatoryFilesToZip(
         ZipCollectionBuilder theFiles, Boolean theIncludeTop2000, String theClassPathPrefix) throws IOException {
      theFiles.addFileZip(theClassPathPrefix, LOINC_XML_FILE.getCode());

@@ -8,6 +8,7 @@ import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeableConcept;

@@ -18,6 +19,7 @@ import org.hl7.fhir.r4.model.codesystems.HttpVerb;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

@@ -29,6 +31,7 @@ import java.util.List;
import java.util.Optional;
import java.util.Set;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;

@@ -39,6 +42,9 @@ import static org.junit.jupiter.api.Assertions.fail;

public class TerminologySvcImplR4Test extends BaseTermR4Test {
   private static final Logger ourLog = LoggerFactory.getLogger(TerminologySvcImplR4Test.class);

   @Autowired private BatchJobHelper myBatchJobHelper;

   ConceptValidationOptions optsNoGuess = new ConceptValidationOptions();
   ConceptValidationOptions optsGuess = new ConceptValidationOptions().setInferSystem(true);

@@ -424,6 +430,7 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {

      IIdType id_v2 = myCodeSystemDao.update(codeSystem, mySrd).getId().toUnqualified();
      myTerminologyDeferredStorageSvc.saveAllDeferred();
      myBatchJobHelper.awaitAllBulkJobCompletions(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);

      runInTransaction(() -> {
         List<TermCodeSystemVersion> termCodeSystemVersions_updated = myTermCodeSystemVersionDao.findAll();

@@ -28,7 +28,7 @@ public class ZipCollectionBuilder {
   /**
    * Constructor
    */
   ZipCollectionBuilder() {
   public ZipCollectionBuilder() {
      myFiles = new ArrayList<>();
   }

@@ -0,0 +1,123 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.job.builder.FlowBuilder;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.core.job.flow.support.SimpleFlow;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Not intended to ever run. Used as a sandbox for "interesting" jobs.
 */
public class DynamicJobFlowSandbox {
   protected static final Logger ourLog = LoggerFactory.getLogger(DynamicJobFlowSandbox.class);

   @Autowired
   private JobBuilderFactory myJobBuilderFactory;

   @Autowired
   private StepBuilderFactory myStepBuilderFactory;

   private List<Long> versionPidList = Lists.newArrayList(3L, 5L);

   @Bean
   public Job testJob() {
      SimpleJobBuilder jobBuilder = myJobBuilderFactory.get("job")
         .start(stepPreFlow());

      // add a flow for each Pid
      List<Flow> flowForEachPidList = versionPidList.stream().map(this::getFlowForPid).collect(Collectors.toList());
      flowForEachPidList.forEach(flowForPid -> jobBuilder.on("COMPLETED").to(flowForPid));

      return jobBuilder.next(stepPostFlow()).build();
   }


   private Flow getFlowForPid(Long theLong) {
      return new FlowBuilder<SimpleFlow>("flow-for-Pid-" + theLong)
         .start(flowStep1(theLong))
         .next(flowStep2(theLong))
         .build();
   }


   public Step flowStep1(long theLong) {
      String name = "flow-step-1-for-Pid-" + theLong;
      return myStepBuilderFactory.get(name)
         .tasklet((contribution, chunkContext) -> {
            ourLog.info("\n\n" + name + " executed\n\n");
            return RepeatStatus.FINISHED;
         })
         .build();
   }


   public Step flowStep2(long theLong) {
      String name = "flow-step-2-for-Pid-" + theLong;
      return myStepBuilderFactory.get(name)
         .tasklet((contribution, chunkContext) -> {
            ourLog.info("\n\n" + name + " executed\n\n");
            return RepeatStatus.FINISHED;
         })
         .build();
   }


   public Step stepPreFlow() {
      return myStepBuilderFactory.get("step-pre-flow")
         .tasklet((contribution, chunkContext) -> {
            ourLog.info("\n\nstep-pre-flow executed\n\n");
            return RepeatStatus.FINISHED;
         })
         .build();
   }


   public Step stepPostFlow() {
      return myStepBuilderFactory.get("step-post-flow")
         .tasklet((contribution, chunkContext) -> {
            ourLog.info("\n\nstep-post-flow executed\n\n");
            return RepeatStatus.FINISHED;
         })
         .build();
   }

}

@@ -0,0 +1,266 @@
package ca.uhn.fhir.jpa.term.job;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
import ca.uhn.fhir.jpa.term.UploadStatistics;
import ca.uhn.fhir.jpa.term.ZipCollectionBuilder;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.util.ResourceUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Properties;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;
import static ca.uhn.fhir.jpa.batch.config.BatchConstants.TERM_CODE_SYSTEM_DELETE_JOB_NAME;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_ANSWERLIST_LINK_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_MAKE_CURRENT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_CODESYSTEM_VERSION;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_DUPLICATE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_GROUP_TERMS_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_HIERARCHY_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PARENT_GROUP_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_PRIMARY_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_RSNA_PLAYBOOK_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_PROPERTIES_FILE;
import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_XML_FILE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;


public class TermCodeSystemDeleteJobTest extends BaseJpaR4Test {

   private final ServletRequestDetails myRequestDetails = new ServletRequestDetails();
   private Properties uploadProperties;

   @Autowired private TermLoaderSvcImpl myTermLoaderSvc;
   @Autowired private IBatchJobSubmitter myJobSubmitter;
   @Autowired private BatchJobHelper myBatchJobHelper;

   @Autowired @Qualifier(TERM_CODE_SYSTEM_DELETE_JOB_NAME)
   private Job myTermCodeSystemDeleteJob;


   private void initMultipleVersionLoad() throws Exception {
      File file = ResourceUtils.getFile("classpath:loinc-ver/" + LOINC_UPLOAD_PROPERTIES_FILE.getCode());
      uploadProperties = new Properties();
      uploadProperties.load(new FileInputStream(file));

      IFhirResourceDao<ValueSet> valueSetIFhirResourceDao = myDaoRegistry.getResourceDao(ValueSet.class);
   }

   @Test
   public void runDeleteJobMultipleVersions() throws Exception {
      initMultipleVersionLoad();

      // loading a loinc CS with version loads two versions (second one with null version)
      String firstCurrentVer = "2.67";
      uploadLoincCodeSystem(firstCurrentVer, true);

      long[] termCodeSystemPidVect = new long[1]; // bypass final restriction
      runInTransaction(() -> {
         assertEquals(1, myTermCodeSystemDao.count());

         TermCodeSystem termCodeSystem = myTermCodeSystemDao.findByCodeSystemUri("http://loinc.org");
         assertNotNull(termCodeSystem);
         termCodeSystemPidVect[0] = termCodeSystem.getPid();

         assertEquals(2, myTermCodeSystemVersionDao.count());
         assertEquals(162, myTermConceptDao.count());
      });

      JobParameters jobParameters = new JobParameters(
         Collections.singletonMap(
            JOB_PARAM_CODE_SYSTEM_ID, new JobParameter(termCodeSystemPidVect[0], true)));

      JobExecution jobExecution = myJobSubmitter.runJob(myTermCodeSystemDeleteJob, jobParameters);

      myBatchJobHelper.awaitJobCompletion(jobExecution);
      assertEquals("COMPLETED", jobExecution.getExitStatus().getExitCode());

      runInTransaction(() -> {
         assertEquals(0, myTermCodeSystemDao.count());
         assertNull(myTermCodeSystemDao.findByCodeSystemUri("http://loinc.org"));
         assertEquals(0, myTermCodeSystemVersionDao.count());
         assertEquals(0, myTermConceptDao.count());
      });
   }


   @Test
   public void runWithNoParameterFailsValidation() {
      JobParametersInvalidException thrown = Assertions.assertThrows(
         JobParametersInvalidException.class,
         () -> myJobSubmitter.runJob(myTermCodeSystemDeleteJob, new JobParameters())
      );
      assertEquals("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_ID + "'", thrown.getMessage());
   }


   @Test
   public void runWithNullParameterFailsValidation() {
      JobParameters jobParameters = new JobParameters(
         Collections.singletonMap(
            JOB_PARAM_CODE_SYSTEM_ID, new JobParameter((Long) null, true)));

      JobParametersInvalidException thrown = Assertions.assertThrows(
         JobParametersInvalidException.class,
         () -> myJobSubmitter.runJob(myTermCodeSystemDeleteJob, jobParameters)
      );
      assertEquals("'" + JOB_PARAM_CODE_SYSTEM_ID + "' parameter is null", thrown.getMessage());
   }


   @Test
   public void runWithParameterZeroFailsValidation() {
      JobParameters jobParameters = new JobParameters(
         Collections.singletonMap(
            JOB_PARAM_CODE_SYSTEM_ID, new JobParameter(0L, true)));

      JobParametersInvalidException thrown = Assertions.assertThrows(
         JobParametersInvalidException.class,
         () -> myJobSubmitter.runJob(myTermCodeSystemDeleteJob, jobParameters)
      );
      assertEquals("Invalid parameter '" + JOB_PARAM_CODE_SYSTEM_ID + "' value: 0", thrown.getMessage());
   }


   private IIdType uploadLoincCodeSystem(String theVersion, boolean theMakeItCurrent) throws Exception {
      ZipCollectionBuilder files = new ZipCollectionBuilder();

      myRequestDetails.getUserData().put(LOINC_CODESYSTEM_MAKE_CURRENT, theMakeItCurrent);
      uploadProperties.put(LOINC_CODESYSTEM_MAKE_CURRENT.getCode(), Boolean.toString(theMakeItCurrent));

      assertTrue(
         theVersion == null || theVersion.equals("2.67") || theVersion.equals("2.68") || theVersion.equals("2.69"),
         "Version supported are: 2.67, 2.68, 2.69 and null");

      if (StringUtils.isBlank(theVersion)) {
         uploadProperties.remove(LOINC_CODESYSTEM_VERSION.getCode());
      } else {
         uploadProperties.put(LOINC_CODESYSTEM_VERSION.getCode(), theVersion);
      }

      addLoincMandatoryFilesToZip(files, theVersion);

      UploadStatistics stats = myTermLoaderSvc.loadLoinc(files.getFiles(), mySrd);
      myTerminologyDeferredStorageSvc.saveAllDeferred();

      return stats.getTarget();
   }


   public void addLoincMandatoryFilesToZip(ZipCollectionBuilder theFiles, String theVersion) throws IOException {
      String theClassPathPrefix = getClassPathPrefix(theVersion);
      addBaseLoincMandatoryFilesToZip(theFiles, true, theClassPathPrefix);
      theFiles.addPropertiesZip(uploadProperties, LOINC_UPLOAD_PROPERTIES_FILE.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_PART_LINK_FILE_PRIMARY_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_PART_LINK_FILE_SUPPLEMENTARY_DEFAULT.getCode());
   }


   private static void addBaseLoincMandatoryFilesToZip(
         ZipCollectionBuilder theFiles, Boolean theIncludeTop2000, String theClassPathPrefix) throws IOException {
      theFiles.addFileZip(theClassPathPrefix, LOINC_XML_FILE.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_GROUP_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_GROUP_TERMS_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_PARENT_GROUP_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_DUPLICATE_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_HIERARCHY_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_DUPLICATE_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_LINK_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_ANSWERLIST_LINK_DUPLICATE_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_PART_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_PART_RELATED_CODE_MAPPING_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_DOCUMENT_ONTOLOGY_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_RSNA_PLAYBOOK_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_UNIVERSAL_LAB_ORDER_VALUESET_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_IEEE_MEDICAL_DEVICE_CODE_MAPPING_TABLE_FILE_DEFAULT.getCode());
      theFiles.addFileZip(theClassPathPrefix, LOINC_IMAGING_DOCUMENT_CODES_FILE_DEFAULT.getCode());
      if (theIncludeTop2000) {
         theFiles.addFileZip(theClassPathPrefix, LOINC_TOP2000_COMMON_LAB_RESULTS_SI_FILE_DEFAULT.getCode());
         theFiles.addFileZip(theClassPathPrefix, LOINC_TOP2000_COMMON_LAB_RESULTS_US_FILE_DEFAULT.getCode());
      }
   }


   private String getClassPathPrefix(String theVersion) {
      String theClassPathPrefix = "/loinc-ver/v-no-version/";

      if (StringUtils.isBlank(theVersion)) return theClassPathPrefix;

      switch(theVersion) {
         case "2.67":
            return "/loinc-ver/v267/";
         case "2.68":
            return "/loinc-ver/v268/";
         case "2.69":
            return "/loinc-ver/v269/";
      }

      fail("Setup failed. Unexpected version: " + theVersion);
      return null;
   }

}
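Note: the three *FailsValidation tests above pin down the exact messages produced by the delete job's parameter validation, but the validator itself is not part of this diff. As a point of reference only, a minimal sketch of a Spring Batch JobParametersValidator that would produce those messages might look like the following; the class name TermCodeSystemDeletePidValidatorSketch is hypothetical, and the validator actually wired into termCodeSystemDeleteJob may be implemented differently.

package ca.uhn.fhir.jpa.term.job;

import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;

import static ca.uhn.fhir.jpa.batch.config.BatchConstants.JOB_PARAM_CODE_SYSTEM_ID;

/**
 * Hypothetical sketch only; not part of this commit.
 */
public class TermCodeSystemDeletePidValidatorSketch implements JobParametersValidator {

   @Override
   public void validate(JobParameters theParameters) throws JobParametersInvalidException {
      // missing parameter -> "This job needs Parameter: 'termCodeSystemPid'"
      if (theParameters == null || !theParameters.getParameters().containsKey(JOB_PARAM_CODE_SYSTEM_ID)) {
         throw new JobParametersInvalidException("This job needs Parameter: '" + JOB_PARAM_CODE_SYSTEM_ID + "'");
      }

      JobParameter parameter = theParameters.getParameters().get(JOB_PARAM_CODE_SYSTEM_ID);
      Long pid = (Long) parameter.getValue();

      // null value -> "'termCodeSystemPid' parameter is null"
      if (pid == null) {
         throw new JobParametersInvalidException("'" + JOB_PARAM_CODE_SYSTEM_ID + "' parameter is null");
      }

      // non-positive value -> "Invalid parameter 'termCodeSystemPid' value: 0"
      if (pid <= 0) {
         throw new JobParametersInvalidException("Invalid parameter '" + JOB_PARAM_CODE_SYSTEM_ID + "' value: " + pid);
      }
   }
}

Such a validator would typically be attached where the job bean is assembled, for example via the builder's validator(...) call, so that runJob(...) rejects bad parameters before any step executes.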