Merge branch 'master' into 4850-partitioned-cr-issue

justin.mckelvy 2023-05-26 10:03:10 -06:00
commit 63359c3a9e
204 changed files with 12998 additions and 1071 deletions

.github/CODEOWNERS (new file)

@ -0,0 +1,17 @@
migrate/ @hapifhir/data-migrations
hapi-fhir-cli/**/*.java @nathandoef
hapi-fhir-storage-cr/**/*.java @JPercival
hapi-fhir-jpaserver-subscription/**/*.java @fil512
hapi-fhir-jpaserver-mdm/**/*.java @fil512 @tadgh
hapi-fhir-storage/**/*.java @fil512
hapi-fhir-storage-batch2/**/*.java @michaelabuckley @jamesagnew
hapi-fhir-storage-batch2-jobs/**/*.java @michaelabuckley @jamesagnew
hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java @jamesagnew
hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/consent/ConsentInterceptor.java @jamesagnew


@ -1,24 +0,0 @@
name: Reviewer Check
on:
pull_request
jobs:
build:
runs-on: ubuntu-latest
name: Add Migration Reviewers if Necessary
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Check if migrations changed
id: migrations-changed
uses: tj-actions/changed-files@v35
with:
files: |
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
- name: Add migration reviewers if migration files changed
if: steps.migrations-changed.outputs.any_changed == 'true'
uses: madrapps/add-reviewers@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
reviewers: michaelabuckley,jamesagnew


@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.1-SNAPSHOT</version>
<version>6.7.3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@ -1003,7 +1003,9 @@ public class JsonParser extends BaseParser implements IJsonLikeParser {
for (Iterator<String> keyIter = theObject.keyIterator(); keyIter.hasNext(); ) {
String nextName = keyIter.next();
if ("resourceType".equals(nextName)) {
continue;
if (theState.isToplevelResourceElement()) {
continue;
}
} else if ("extension".equals(nextName)) {
BaseJsonLikeArray array = grabJsonArray(theObject, nextName, "extension");
parseExtension(theState, array, false);


@ -148,6 +148,10 @@ class ParserState<T> {
return myState.isPreResource();
}
boolean isToplevelResourceElement() {
return myState instanceof ParserState.ResourceStateHl7Org || myState instanceof ParserState.ResourceStateHapi;
}
private Object newContainedDt(IResource theTarget) {
return ReflectionUtil.newInstance(theTarget.getStructureFhirVersionEnum().getVersionImplementation().getContainedType());
}


@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.1-SNAPSHOT</version>
<version>6.7.3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.1-SNAPSHOT</version>
<version>6.7.3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.1-SNAPSHOT</version>
<version>6.7.3-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>


@ -117,7 +117,7 @@
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.33</version>
<version>2.0</version>
</dependency>
<dependency>


@ -0,0 +1,5 @@
---
type: add
issue: 4654
title: "Adds an Oracle embedded database to the automated migration test scaffolding. Also adds an initialization schema
(version 5.1.0) with test data to the automated migration tests."


@ -1,4 +1,4 @@
---
type: add
issue: 4697
title: "Added R4 support for Questionnaire/$prepopulate and PlanDefinition/$package operations. These are operations are intended to support extended DaVinci DTR and SDC uses cases."
title: "Added R4 support for Questionnaire/$prepopulate, Questionnaire/$package and PlanDefinition/$package operations. These are operations are intended to support extended DaVinci DTR and SDC uses cases."


@ -0,0 +1,6 @@
---
type: fix
issue: 4789
title: "Previously, there was the possibility for a race condition to happen in the initialization
of the email subscription processing component that would result in email not being sent out. This
issue has been fixed."


@ -0,0 +1,5 @@
---
type: fix
issue: 4804
jira: SMILE-5145
title: "Improved performance of `mdm-clear` operation by adding index and avoiding redundant deletion."


@ -0,0 +1,11 @@
---
type: fix
issue: 4844
title: "/Patient/{patientid}/$everything?_type={resource types}
would omit resources that were not directly related to the Patient
resource (even if those resources were specified in the _type list).
This conflicted with the /Patient/{patientid}/$everything operation,
which did return those resources.
This has been fixed so that both return all related resources, even if
those resources are not directly related to the Patient resource.
"


@ -0,0 +1,5 @@
---
type: fix
issue: 4846
title: "Job maintenance service would throw an exception if a job definition is unknown, this would run maintenance on every job instance after it.
Now the maintenance will skip over unknown job definitions and display a warning log message indication a job definition is missing."


@ -0,0 +1,5 @@
---
type: fix
issue: 4853
title: "Previously, when validating resources that contain a display in a Coding/CodeableConcept different from the
display defined in the CodeSystem that is used, no errors were returned in the outcome. This is now fixed."


@ -0,0 +1,5 @@
---
type: fix
issue: 4860
title: "Running an $export that completes successfully results in a progress percentage of less than 100%.
This has now been fixed."


@ -0,0 +1,4 @@
---
type: add
issue: 4861
title: "Add documentation for $care-gaps operation"


@ -0,0 +1,5 @@
---
type: fix
issue: 4863
title: "Previously the SearchParameterCanonicalizer did not correctly convert DSTU2 and DSTU3 custom resources SearchParameters
into RuntimeSearchParam. This is now fixed."


@ -0,0 +1,5 @@
---
type: fix
issue: 4872
title: "POSTing a Bundle with over 100 references to the same resource will fail with HAPI-2207 'Multiple resources match'.
This has been fixed."


@ -0,0 +1,4 @@
---
type: fix
issue: 4873
title: "Previously, if the fhirId in ResourceTable happened to be set to an empty string, the resourceId would be missing when trying to generate the full ID string. This has now been fixed."


@ -0,0 +1,5 @@
---
type: fix
issue: 4875
title: "Previously, `$binary-access-write` operation didn't trigger `STORAGE_BINARY_ASSIGN_BLOB_ID_PREFIX` Pointcut.
This has been fixed."


@ -0,0 +1,5 @@
---
type: fix
issue: 4886
title: "Requests to start an $export of Patient or Group will now fail with 404 ResourceNotFound when the target
resources do not exist. Before, the system would start a bulk export background job which would then fail."


@ -0,0 +1,7 @@
---
type: fix
issue: 4891
title: "Initiating a bulk export with a _type filter would sometimes return
resource types not specified in the filter.
This has been fixed.
"


@ -0,0 +1,4 @@
---
type: fix
issue: 4893
title: "Update the IRuleBuilder to support Patient Export rules via the new `patientExportOnPatient` method on the IRuleBuilder. Previously, it was accidentally using Group Export rules."


@ -0,0 +1,4 @@
---
type: fix
issue: 4910
title: "Remove some references to `all_constraints` table in oracle database migration tasks which were causing errors for version 19c."


@ -0,0 +1,4 @@
---
type: perf
issue: 4915
title: "Includes by canonical url now use an indexed query, and are much faster."


@ -0,0 +1,5 @@
---
type: perf
issue: 4920
title: "The `$expunge` operation has been slightly optimized and should issue fewer SQL
statements to the database."


@ -0,0 +1,4 @@
---
type: fix
issue: 4878
title: "Batch jobs occasionaly reported zero (0) record processed counts. This has been corrected."


@ -0,0 +1,4 @@
---
type: fix
issue: 4896
title: "The _lastUpdated query parameter is no longer applied to _include or _revinclude search results."


@ -0,0 +1,4 @@
---
type: add
issue: 4911
title: "Added a new SubscriptionTopicDispatcher service for use by java extensions that need to dispatch their own subscription topic notifications"


@ -0,0 +1,5 @@
---
type: fix
issue: 4922
title: "R5 Subscription.filterBy.resourceType failed to deserialize because the deserializer skipped all elements named
'resourceType'. This has been changed so that only top-level resourceType elements are skipped in the deserialization process."


@ -0,0 +1,5 @@
---
type: add
issue: 4937
title: "The SQL schema migrator now returns a status flag indicating whether the schema was initialized
or not."


@ -1 +1,7 @@
Users of the `Resource.meta.source` field, as well as users of the `_source` parameter, should perform a global $reindex after upgrading to this version of HAPI FHIR with the following parameters:
```url
[base]/$reindex?reindexSearchParameters=false&optimizeStorage=ALL_VERSIONS
```
The previous mechanism for storing and indexing these parameters is inefficient and will be replaced in a future release of HAPI FHIR. Performing this reindex operation ensures that existing data will continue to be searchable.


@ -0,0 +1,73 @@
# Care Gaps
## Overview
A gap in care is a discrepancy in a patient's care identified through analysis of their medical records, history, and current health status.
These gaps can include missing or incomplete information, unmet health needs, and opportunities for preventative care or intervention. Identifying and addressing care gaps can help improve the quality of care provided to patients, reduce healthcare costs, and ultimately lead to better health outcomes.
Example: This woman was supposed to have a breast cancer screening this year but did not. Let's reach out to her and get that scheduled.
A Gaps in Care Report is designed to communicate actual or perceived gaps in care between systems, such as a payer's system and a provider's EMR. The report provides opportunities for providers to deliver missing care and/or to communicate care provision data to payers. The report may also provide information about upcoming care opportunities (prospective gaps).
The gaps in care flow runs between a provider and a measurement organization's system performing analytics.
<a href="/hapi-fhir/docs/images/caregapsflow.png"><img src="/hapi-fhir/docs/images/caregapsflow.png" alt="Care Gaps Flow" style="margin-left: 15px; margin-bottom: 15px;" /></a><sub><sup>Sourced from [Implementation Guide](http://hl7.org/fhir/us/davinci-deqm/2023Jan/gaps-in-care-reporting.html)</sup></sub>
The Gaps in Care Reporting uses the [DEQM Individual MeasureReport Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-indv-measurereport-deqm.html). This allows the Gaps in Care Reporting to use the same machinery as the Individual Reporting to calculate measures and represent the results of individual calculation.
The following resources are used in the Gaps in Care Reporting Scenario:
| Report Type | Profile Name | Link to Profile |
|---------------|:---------------------------------------:|-----------------------------------------------------------------------------------------------------------------------------------|
| Bundle | DEQM Gaps In Care Bundle Profile | [DEQM Gaps In Care Bundle Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-gaps-bundle-deqm.html) |
| Composition | DEQM Gaps In Care Composition Profile | [DEQM Gaps In Care Composition Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-gaps-composition-deqm.html) |
| DetectedIssue | DEQM Gaps In Care DetectedIssue Profile | [DEQM Gaps In Care DetectedIssue Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-gaps-detectedissue-deqm.html) |
| Group | DEQM Gaps In Care Group Profile | [DEQM Gaps In Care Group Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-gaps-group-deqm.html) |
| MeasureReport | DEQM Gaps In Care MeasureReport Profile | [DEQM Gaps In Care MeasureReport Profile](http://hl7.org/fhir/us/davinci-deqm/2023Jan/StructureDefinition-indv-measurereport-deqm.html) |
## Gaps in Care Reporting
[Gaps through period](http://hl7.org/fhir/us/davinci-deqm/2023Jan/index.html#glossary) is the time period defined by a client for running the Gaps in Care Report.
* When the [gaps through period](http://hl7.org/fhir/us/davinci-deqm/2023Jan/index.html#glossary) ends on a date that is in the future, the Gaps in Care Reporting is said to look for care gaps prospectively. In this scenario, it provides providers with opportunities to assess anticipated [open gaps](http://build.fhir.org/ig/HL7/davinci-deqm/index.html#glossary) and take proper actions to close the gaps.
* When the [gaps through period](http://hl7.org/fhir/us/davinci-deqm/2023Jan/index.html#glossary) ends on a date that is in the past, the Gaps in Care Reporting is said to look for care gaps retrospectively. In the retrospective scenario, identified [open gaps](http://build.fhir.org/ig/HL7/davinci-deqm/index.html#glossary) can no longer be acted upon to meet the quality measure.
| Use Case | care-gaps Operation | Gaps Through Period Start Date | Gaps Through Period End Date | Report Calculated Date | Colorectal Cancer Screening - Colonoscopy Date | Gaps in Care Report |
|---------------|:---------------------------------------:|---------------------------------------------------------------------------------------------------------------------------------------|------------------------------|------------------------|------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Prospective Use Case | $care-gaps?periodStart=2021-01-01&periodEnd=2021-06-30&subject=Patient/123&measureId=EXM130-7.3.000&status=open-gap | 2021-01-01 | 2021-06-30 | 2021-04-01 | Example: patient had colonoscopy on 2011-05-03 | Returns gaps through 2021-06-30. The Gaps in Care Report indicates the patient has an [open gap](http://build.fhir.org/ig/HL7/davinci-deqm/index.html#glossary) for the colorectal cancer screening measure. By 2021-06-30, the colonoscopy would be over 10 years old. |
| Retrospective Use Case | $care-gaps?periodStart=2020-01-01&periodEnd=2020-12-31&subject=Patient/123&measureId=EXM130-7.3.000&status=open-gap | 2020-01-01 | 2020-12-31 | 2021-04-01 | Example: patient had colonoscopy on 2011-05-03 | Returns gaps through 2020-12-31. The Gaps in Care Report indicates the patient has a [closed gap](http://build.fhir.org/ig/HL7/davinci-deqm/index.html#glossary) for the colorectal cancer screening measure, since, as of 2020-12-31, the procedure would have occurred within the specified 10-year timeframe. |
## Operations
HAPI FHIR implements the [$care-gaps](http://hl7.org/fhir/us/davinci-deqm/2023Jan/OperationDefinition-care-gaps.html) operation.
## Care Gaps
The `$care-gaps` operation is used to run a Gaps in Care Report.
### Testing care gaps on HAPI FHIR
HAPI FHIR supports the `$care-gaps` operation. The steps below identify an open gap in sample data and then, after a remediation step, generate a report showing the closed gap.
All the sample files used below are available in the [hapi-fhir](https://github.com/hapifhir/hapi-fhir/tree/master/hapi-fhir-storage-cr/src/test/resources) code base under the test resources folder.
1. Submit payer content
```bash
POST http://localhost/fhir/ CaregapsColorectalCancerScreeningsFHIR-bundle.json
```
2. Submit payer org data
```bash
POST http://localhost/fhir/ CaregapsAuthorAndReporter.json
```
3. Submit provider data
```bash
POST http://localhost/fhir/Measure/ColorectalCancerScreeningsFHIR/$submit-data CaregapsPatientData.json
```
4. Provider runs the care-gaps operation to identify the open gap.
```bash
GET http://localhost/fhir/Measure/$care-gaps?periodStart=2020-01-01&periodEnd=2020-12-31&status=open-gap&status=closed-gap&subject=Patient/end-to-end-EXM130&measureId=ColorectalCancerScreeningsFHIR
```
5. Provider fixes gaps
```bash
POST http://localhost/fhir/Measure/ColorectalCancerScreeningsFHIR/$submit-data CaregapsSubmitDataCloseGap.json
```
6. Provider runs the care-gaps operation to verify that the gap is closed.
```bash
GET http://localhost/fhir/Measure/$care-gaps?periodStart=2020-01-01&periodEnd=2020-12-31&status=open-gap&status=closed-gap&subject=Patient/end-to-end-EXM130&measureId=ColorectalCancerScreeningsFHIR
```


@ -0,0 +1,270 @@
# PlanDefinition
## Introduction
The FHIR Clinical Reasoning Module defines the [PlanDefinition resource](https://www.hl7.org/fhir/plandefinition.html) and several [associated operations](https://www.hl7.org/fhir/plandefinition-operations.html). A plan definition is a pre-defined group of actions to be taken in particular circumstances, often including conditional elements, options, and other decision points. The resource is flexible enough to be used to represent a variety of workflows, as well as clinical decision support and quality improvement assets, including order sets, protocols, and decision support rules.
PlanDefinitions can contain hierarchical groups of action definitions, where each action definition describes an activity to be performed (often in terms of an ActivityDefinition resource), and each group defines additional behavior, relationships, and applicable conditions between the actions in the overall definition.
In addition to describing what should take place, each action in a plan definition can specify when and whether the action should take place. For when the action should be taken, the trigger element specifies the action should be taken in response to some trigger occurring (such as a particular point in a workflow being reached, or as the result of a prescription being ordered). For whether the action should be taken, the condition element can be used to provide an expression that evaluates to true or false to indicate the applicability of the action to the specific context.
The process of applying a PlanDefinition to a particular context typically produces request resources representing the actions that should be performed, grouped within a RequestOrchestration to capture relationships between the resulting request resources.
Each ActivityDefinition is used to construct a specific resource, based on the definition of the activity and combined with contextual information for the particular patient that the plan definition is being applied to.
```json
{
"resourceType": "PlanDefinition",
"id": "opioidcds-04",
"url": "http://hl7.org/fhir/ig/opioid-cds/PlanDefinition/opioidcds-04",
"identifier": [
{
"system": "urn:ietf:rfc:3986",
"value": "urn:oid:2.16.840.1.113883.4.642.11.4"
},
{
"use": "official",
"value": "cdc-opioid-guidance"
}
],
"version": "0.1.0",
"name": "Cdcopioid04",
"title": "CDC Opioid Prescribing Guideline Recommendation #4",
"type": {
"coding": [
{
"system": "http://terminology.hl7.org/CodeSystem/plan-definition-type",
"code": "eca-rule",
"display": "ECA Rule"
}
]
},
"status": "draft",
"date": "2018-03-19",
"publisher": "Centers for Disease Control and Prevention (CDC)",
"description": "When starting opioid therapy for chronic pain, clinicians should prescribe immediate-release opioids instead of extended-release/long-acting (ER/LA) opioids.",
"useContext": [
{
"code": {
"system": "http://terminology.hl7.org/CodeSystem/usage-context-type",
"code": "focus",
"display": "Clinical Focus"
},
"valueCodeableConcept": {
"coding": [
{
"system": "http://snomed.info/sct",
"code": "182888003",
"display": "Medication requested (situation)"
}
]
}
},
{
"code": {
"system": "http://terminology.hl7.org/CodeSystem/usage-context-type",
"code": "focus",
"display": "Clinical Focus"
},
"valueCodeableConcept": {
"coding": [
{
"system": "http://snomed.info/sct",
"code": "82423001",
"display": "Chronic pain (finding)"
}
]
}
}
],
"jurisdiction": [
{
"coding": [
{
"system": "urn:iso:std:iso:3166",
"code": "US",
"display": "United States of America"
}
]
}
],
"purpose": "CDCs Guideline for Prescribing Opioids for Chronic Pain is intended to improve communication between providers and patients about the risks and benefits of opioid therapy for chronic pain, improve the safety and effectiveness of pain treatment, and reduce the risks associated with long-term opioid therapy, including opioid use disorder and overdose. The Guideline is not intended for patients who are in active cancer treatment, palliative care, or end-of-life care.",
"usage": "Providers should use caution when prescribing extended-release/long-acting (ER/LA) opioids as they carry a higher risk and negligible benefit compared to immediate-release opioids.",
"copyright": "© CDC 2016+.",
"topic": [
{
"text": "Opioid Prescribing"
}
],
"author": [
{
"name": "Kensaku Kawamoto, MD, PhD, MHS"
},
{
"name": "Bryn Rhodes"
},
{
"name": "Floyd Eisenberg, MD, MPH"
},
{
"name": "Robert McClure, MD, MPH"
}
],
"relatedArtifact": [
{
"type": "documentation",
"display": "CDC guideline for prescribing opioids for chronic pain",
"document": {
"url": "https://guidelines.gov/summaries/summary/50153/cdc-guideline-for-prescribing-opioids-for-chronic-pain---united-states-2016#420"
}
},
{
"type": "documentation",
"display": "MME Conversion Tables",
"document": {
"url": "https://www.cdc.gov/drugoverdose/pdf/calculating_total_daily_dose-a.pdf"
}
}
],
"library": [
"http://example.org/fhir/Library/opioidcds-recommendation-04"
],
"action": [
{
"title": "Extended-release opioid prescription triggered.",
"description": "Checking if the trigger prescription meets the inclusion criteria for recommendation #4 workflow.",
"documentation": [
{
"type": "documentation",
"document": {
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/cqf-strengthOfRecommendation",
"valueCodeableConcept": {
"coding": [
{
"system": "http://terminology.hl7.org/CodeSystem/recommendation-strength",
"code": "strong",
"display": "Strong"
}
]
}
},
{
"url": "http://hl7.org/fhir/StructureDefinition/cqf-qualityOfEvidence",
"valueCodeableConcept": {
"coding": [
{
"system": "http://terminology.hl7.org/CodeSystem/evidence-quality",
"code": "low",
"display": "Low quality"
}
]
}
}
]
}
}
],
"trigger": [
{
"type": "named-event",
"name": "medication-prescribe"
}
],
"condition": [
{
"kind": "applicability",
"expression": {
"description": "Check whether the opioid prescription for the existing patient is extended-release without any opioids-with-abuse-potential prescribed in the past 90 days.",
"language": "text/cql-identifier",
"expression": "Inclusion Criteria"
}
}
],
"groupingBehavior": "visual-group",
"selectionBehavior": "exactly-one",
"dynamicValue": [
{
"path": "action.title",
"expression": {
"language": "text/cql-identifier",
"expression": "Get Summary"
}
},
{
"path": "action.description",
"expression": {
"language": "text/cql-identifier",
"expression": "Get Detail"
}
},
{
"path": "activity.extension",
"expression": {
"language": "text/cql-identifier",
"expression": "Get Indicator"
}
}
],
"action": [
{
"description": "Will prescribe immediate release"
},
{
"description": "Risk of overdose carefully considered and outweighed by benefit; snooze 3 mo"
},
{
"description": "N/A - see comment; snooze 3 mo"
}
]
}
]
}
```
## Operations
HAPI implements the [$apply](http://hl7.org/fhir/uv/cpg/OperationDefinition-cpg-plandefinition-apply.html) operation. Support for additional operations is planned.
## Apply
The `$apply` operation applies a PlanDefinition to a given context. This implementation follows the [FHIR Specification](https://www.hl7.org/fhir/plandefinition.html#12.23.4.3) and supports the [FHIR Clinical Guidelines IG](http://hl7.org/fhir/uv/cpg/index.html). In addition, an R5 version of apply is made available for R4 instances. This will cause $apply to return a Bundle of resources instead of a CarePlan. This can be invoked with `$r5.apply`.
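For illustration, a minimal sketch of an `$r5.apply` invocation, assuming a PlanDefinition with a placeholder id is already loaded on the server (the subject reference is also a placeholder):
```bash
# Hypothetical invocation of the R5 variant of $apply on an R4 server;
# returns a Bundle of resources rather than a CarePlan.
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$r5.apply?subject=Patient/patientId
```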
### Example PlanDefinition
Some example PlanDefinition workflows are available in the [opioid-cds-r4](https://github.com/cqframework/opioid-cds-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/opioid-cds-r4/tree/1e543f781138f3d85404b7f65a92ff713519ef2c/bundles). You can download a Bundle and load it on your server as a transaction:
```bash
POST http://your-server-base/fhir opioidcds-10-patient-view-bundle.json
```
These Bundles do not include example Patient clinical data. Applying a PlanDefinition can be invoked with:
```bash
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&encounter=Encounter/encounterId&practitioner=Practitioner/practitionerId
```
### Additional Parameters
The following additional parameters are supported for the `$apply` and `$r5.apply` operations (a sketch passing some of them follows the table):
| Parameter | Type | Description |
|-----------|------------|-------------|
| organization | String | The organization in context |
| userType | String | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) |
| userLanguage | String | Preferred language of the person using the system |
| userTaskContext | String | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources |
| setting | String | The current setting of the request (inpatient, outpatient, etc.) |
| settingContext | String | Additional detail about the setting of the request, if any |
| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
| data | Bundle | Data to be made available to the PlanDefinition evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |
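As a hedged example, assuming the opioid CDS PlanDefinition from above, some of these parameters might be supplied as query parameters (all ids and values are placeholders):
```bash
# Hypothetical $apply call passing optional context parameters;
# the ids and parameter values are placeholders, not required inputs.
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply?subject=Patient/patientId&userType=provider&setting=outpatient&userLanguage=en
```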
## Package
The `package` operation for [PlanDefinition](https://www.hl7.org/fhir/plandefinition.html) will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
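For illustration, a minimal sketch of a `$package` invocation, assuming the operation is exposed at the instance level (the PlanDefinition id is a placeholder):
```bash
# Hypothetical $package call; returns a Bundle containing the
# PlanDefinition and its related resources.
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$package
```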


@ -0,0 +1,499 @@
# Questionnaires
## Introduction
The FHIR Clinical Reasoning Module defines the [Questionnaire resource](https://www.hl7.org/fhir/questionnaire.html). A Questionnaire is an organized collection of questions intended to solicit information from patients, providers or other individuals involved in the healthcare domain. They may be simple flat lists of questions or can be hierarchically organized in groups and sub-groups, each containing questions. The Questionnaire defines the questions to be asked, how they are ordered and grouped, any intervening instructional text and what the constraints are on the allowed answers. The results of a Questionnaire can be communicated using the QuestionnaireResponse resource.
Questionnaires cover the need to communicate data originating from forms used in medical history examinations, research questionnaires and sometimes full clinical specialty records. In many systems this data is collected using user-defined screens and forms. Questionnaires define specifics about data capture - exactly what questions were asked, in what order, what choices for answers were, etc. Each of these questions is part of the Questionnaire, and as such the Questionnaire is a separately identifiable Resource, whereas the individual questions are not. (Questionnaire questions can be linked to shared data elements using the Questionnaire.item.definition element.)
In addition to its use as a means for capturing data, Questionnaires can also be useful as a mechanism of defining a standardized 'presentation' of data that might already exist. For example, a peri-natal form or diabetes management form. In this use, the benefit is to expose a large volume of data in a predictable way that can be defined outside the user-interface design of the relevant system. The form might allow data to be edited or might be read-only. In some cases, the QuestionnaireResponse might not be intended to be persisted.
## Operations
HAPI implements the following operations from the [Structured Data Capture IG](https://hl7.org/fhir/uv/sdc/index.html):
* [$populate](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html)
* [$extract](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html)
Support for additional operations is planned.
## Populate
The `populate` operation generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.
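For illustration, a minimal sketch of a `$populate` invocation against the example Questionnaire below, assuming a Patient with a placeholder id exists on the server:
```bash
# Hypothetical $populate call; returns a QuestionnaireResponse with
# answers pre-filled where the server can resolve them.
GET http://your-server-base/fhir/Questionnaire/ASLPA1/$populate?subject=Patient/positive
```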
### Example Questionnaire
```json
{
"resourceType": "Questionnaire",
"id": "ASLPA1",
"meta": {
"versionId": "1",
"lastUpdated": "2023-05-09T19:02:10.538-06:00",
"source": "#jucRbegv3NMJkZ8X"
},
"extension": [
{
"url": "http://hl7.org/fhir/uv/cpg/StructureDefinition/cpg-knowledgeCapability",
"valueCode": "shareable"
},
{
"url": "http://hl7.org/fhir/uv/cpg/StructureDefinition/cpg-knowledgeCapability",
"valueCode": "computable"
},
{
"url": "http://hl7.org/fhir/uv/cpg/StructureDefinition/cpg-knowledgeCapability",
"valueCode": "publishable"
},
{
"url": "http://hl7.org/fhir/uv/cpg/StructureDefinition/cpg-knowledgeRepresentationLevel",
"valueCode": "structured"
},
{
"url": "http://hl7.org/fhir/StructureDefinition/cqf-library",
"valueCanonical": "http://example.org/sdh/dtr/aslp/Library/ASLPDataElements"
}
],
"url": "http://example.org/sdh/dtr/aslp/Questionnaire/ASLPA1",
"name": "ASLPA1",
"title": "ASLP.A1 Adult Sleep Studies",
"status": "active",
"experimental": false,
"description": "Adult Sleep Studies Prior Authorization Form",
"useContext": [
{
"code": {
"system": "http://terminology.hl7.org/CodeSystem/usage-context-type",
"code": "task",
"display": "Workflow Task"
},
"valueCodeableConcept": {
"coding": [
{
"system": "http://fhir.org/guides/nachc/hiv-cds/CodeSystem/activity-codes",
"code": "ASLP.A1",
"display": "Adult Sleep Studies"
}
]
}
}
],
"item": [
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-itemPopulationContext",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "Sleep Study"
}
}
],
"linkId": "0",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order",
"text": "A sleep study procedure being ordered",
"type": "group",
"repeats": true,
"item": [
{
"linkId": "1",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.code",
"text": "A sleep study procedure being ordered",
"type": "choice",
"answerValueSet": "http://example.org/sdh/dtr/aslp/ValueSet/aslp-a1-de1-codes-grouper"
},
{
"linkId": "2",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.occurrence[x]",
"text": "Date of the procedure",
"type": "dateTime"
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "Diagnosis of Obstructive Sleep Apnea"
}
}
],
"linkId": "3",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-diagnosis-of-obstructive-sleep-apnea#Condition.code",
"text": "Diagnosis of Obstructive Sleep Apnea",
"type": "choice",
"answerValueSet": "http://example.org/sdh/dtr/aslp/ValueSet/aslp-a1-de17"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "History of Hypertension"
}
}
],
"linkId": "4",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-history-of-hypertension#Observation.value[x]",
"text": "History of Hypertension",
"type": "boolean"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "History of Diabetes"
}
}
],
"linkId": "5",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-history-of-diabetes#Observation.value[x]",
"text": "History of Diabetes",
"type": "boolean"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "Neck Circumference"
}
}
],
"linkId": "6",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-height#Observation.value[x]",
"text": "Neck circumference (in inches)",
"type": "quantity"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "Height"
}
}
],
"linkId": "7",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-height#Observation.value[x]",
"text": "Height (in inches)",
"type": "quantity"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "Weight"
}
}
],
"linkId": "8",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-weight#Observation.value[x]",
"text": "Weight (in pounds)",
"type": "quantity"
},
{
"extension": [
{
"url": "http://hl7.org/fhir/uv/sdc/StructureDefinition/sdc-questionnaire-initialExpression",
"valueExpression": {
"language": "text/cql-identifier",
"expression": "BMI"
}
}
],
"linkId": "9",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-bmi#Observation.value[x]",
"text": "Body mass index (BMI)",
"type": "quantity"
}
]
}
```
### Example QuestionnaireResponse
```json
{
"resourceType": "QuestionnaireResponse",
"id": "ASLPA1-positive-response",
"extension": [
{
"url": "http://hl7.org/fhir/us/davinci-dtr/StructureDefinition/dtr-questionnaireresponse-questionnaire",
"valueReference": {
"reference": "#ASLPA1-positive"
}
}
],
"questionnaire": "http://example.org/sdh/dtr/aslp/Questionnaire/ASLPA1",
"status": "in-progress",
"subject": {
"reference": "Patient/positive"
},
"item": [
{
"linkId": "0",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order",
"text": "A sleep study procedure being ordered",
"item": [
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "1",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.code",
"text": "A sleep study procedure being ordered",
"answer": [
{
"valueCoding": {
"system": "http://example.org/sdh/dtr/aslp/CodeSystem/aslp-codes",
"code": "ASLP.A1.DE2",
"display": "Home sleep apnea testing (HSAT)"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "2",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.occurrence[x]",
"text": "Date of the procedure",
"answer": [
{
"valueDateTime": "2023-04-10T08:00:00.000Z"
}
]
}
]
},
{
"linkId": "0",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order",
"text": "A sleep study procedure being ordered",
"item": [
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "1",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.code",
"text": "A sleep study procedure being ordered",
"answer": [
{
"valueCoding": {
"system": "http://example.org/sdh/dtr/aslp/CodeSystem/aslp-codes",
"code": "ASLP.A1.DE14",
"display": "Artificial intelligence (AI)"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "2",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-sleep-study-order#ServiceRequest.occurrence[x]",
"text": "Date of the procedure",
"answer": [
{
"valueDateTime": "2023-04-15T08:00:00.000Z"
}
]
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "3",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-diagnosis-of-obstructive-sleep-apnea#Condition.code",
"text": "Diagnosis of Obstructive Sleep Apnea",
"answer": [
{
"valueCoding": {
"system": "http://example.org/sdh/dtr/aslp/CodeSystem/aslp-codes",
"code": "ASLP.A1.DE17",
"display": "Obstructive sleep apnea (OSA)"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "4",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-history-of-hypertension#Observation.value[x]",
"text": "History of Hypertension",
"answer": [
{
"valueBoolean": true
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "5",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-history-of-diabetes#Observation.value[x]",
"text": "History of Diabetes",
"answer": [
{
"valueBoolean": true
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "6",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-height#Observation.value[x]",
"text": "Neck circumference (in inches)",
"answer": [
{
"valueQuantity": {
"value": 16,
"unit": "[in_i]",
"system": "http://unitsofmeasure.org",
"code": "[in_i]"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "7",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-height#Observation.value[x]",
"text": "Height (in inches)",
"answer": [
{
"valueQuantity": {
"value": 69,
"unit": "[in_i]",
"system": "http://unitsofmeasure.org",
"code": "[in_i]"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "8",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-weight#Observation.value[x]",
"text": "Weight (in pounds)",
"answer": [
{
"valueQuantity": {
"value": 185,
"unit": "[lb_av]",
"system": "http://unitsofmeasure.org",
"code": "[lb_av]"
}
}
]
},
{
"extension": [
{
"url": "http://hl7.org/fhir/StructureDefinition/questionnaireresponse-author",
"valueReference": {
"reference": "http://cqframework.org/fhir/Device/clinical-quality-language"
}
}
],
"linkId": "9",
"definition": "http://example.org/sdh/dtr/aslp/StructureDefinition/aslp-bmi#Observation.value[x]",
"text": "Body mass index (BMI)",
"answer": [
{
"valueQuantity": {
"value": 16.2,
"unit": "kg/m2",
"system": "http://unitsofmeasure.org",
"code": "kg/m2"
}
}
]
}
]
}
```
## Extract
The `extract` operation takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources, which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematically, might succeed but provide incorrect output.
This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.
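For illustration, a minimal sketch of an `$extract` invocation, POSTing the example QuestionnaireResponse from above (the file name is a placeholder):
```bash
# Hypothetical $extract call; returns a Bundle of resources
# (e.g. Observations) extracted from the QuestionnaireResponse.
POST http://your-server-base/fhir/QuestionnaireResponse/$extract ASLPA1-positive-response.json
```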
## Package
The `package` operation for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
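For illustration, a minimal sketch of a `$package` invocation for the example Questionnaire above, assuming the operation is exposed at the instance level (the id is a placeholder):
```bash
# Hypothetical $package call; returns a Bundle containing the
# Questionnaire and any related Library or ValueSet resources.
GET http://your-server-base/fhir/Questionnaire/ASLPA1/$package
```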


@ -88,7 +88,10 @@ page.server_jpa_batch.introduction=Batch Introduction
section.clinical_reasoning.title=Clinical Reasoning
page.clinical_reasoning.overview=Clinical Reasoning Overview
page.clinical_reasoning.cql=CQL
page.clinical_reasoning.caregaps=Care Gaps
page.clinical_reasoning.measures=Measures
page.clinical_reasoning.plan_definitions=PlanDefinitions
page.clinical_reasoning.questionnaires=Questionnaires
section.interceptors.title=Interceptors
page.interceptors.interceptors=Interceptors Overview

Binary image file added (95 KiB).


@ -315,7 +315,7 @@
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
<artifactId>jakarta.el</artifactId>
</dependency>
<!-- Note that we need this dependency to send log4j logging requests to slf4j -->
@ -433,11 +433,6 @@
</dependency>
</dependencies>
<properties>
<jackson.version>2.7.1</jackson.version>
</properties>
<build>
<plugins>
<plugin>


@ -41,6 +41,7 @@ import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
@ -56,6 +57,7 @@ import javax.persistence.Query;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
@ -172,9 +174,16 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
theRequest.getSort()
);
Page<Batch2JobInstanceEntity> pageOfEntities = myJobInstanceRepository.findAll(pageRequest);
String jobStatus = theRequest.getJobStatus();
if (Objects.equals(jobStatus, "")) {
Page<Batch2JobInstanceEntity> pageOfEntities = myJobInstanceRepository.findAll(pageRequest);
return pageOfEntities.map(this::toInstance);
}
return pageOfEntities.map(this::toInstance);
StatusEnum status = StatusEnum.valueOf(jobStatus);
List<JobInstance> jobs = toInstanceList(myJobInstanceRepository.findInstancesByJobStatus(status, pageRequest));
Integer jobsOfStatus = myJobInstanceRepository.findTotalJobsOfStatus(status);
return new PageImpl<>(jobs, pageRequest, jobsOfStatus);
}
private List<JobInstance> toInstanceList(List<Batch2JobInstanceEntity> theInstancesByJobDefinitionId) {


@ -24,6 +24,7 @@ import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao;
import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.google.common.hash.HashingInputStream;
import com.google.common.io.ByteStreams;
@ -36,6 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
@ -55,9 +57,11 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@Autowired
private IBinaryStorageEntityDao myBinaryStorageEntityDao;
@Nonnull
@Override
@Transactional(propagation = Propagation.REQUIRED)
public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) throws IOException {
public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType,
InputStream theInputStream, RequestDetails theRequestDetails) throws IOException {
/*
* Note on transactionality: This method used to have a propagation value of SUPPORTS and then do the actual
@ -70,17 +74,16 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
HashingInputStream hashingInputStream = createHashingInputStream(theInputStream);
CountingInputStream countingInputStream = createCountingInputStream(hashingInputStream);
String id = super.provideIdForNewBlob(theBlobIdOrNull);
BinaryStorageEntity entity = new BinaryStorageEntity();
entity.setResourceId(theResourceId.toUnqualifiedVersionless().getValue());
entity.setBlobId(id);
entity.setBlobContentType(theContentType);
entity.setPublished(publishedDate);
Session session = (Session) myEntityManager.getDelegate();
LobHelper lobHelper = session.getLobHelper();
byte[] loadedStream = IOUtils.toByteArray(countingInputStream);
String id = super.provideIdForNewBlob(theBlobIdOrNull, loadedStream, theRequestDetails, theContentType);
entity.setBlobId(id);
Blob dataBlob = lobHelper.createBlob(loadedStream);
entity.setBlob(dataBlob);
@ -105,7 +108,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
public StoredDetails fetchBlobDetails(IIdType theResourceId, String theBlobId) {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isPresent() == false) {
if (entityOpt.isEmpty()) {
return null;
}
@ -121,7 +124,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
@Override
public boolean writeBlob(IIdType theResourceId, String theBlobId, OutputStream theOutputStream) throws IOException {
Optional<BinaryStorageEntity> entityOpt = myBinaryStorageEntityDao.findByIdAndResourceId(theBlobId, theResourceId.toUnqualifiedVersionless().getValue());
if (entityOpt.isPresent() == false) {
if (entityOpt.isEmpty()) {
return false;
}


@ -38,6 +38,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
@ -67,8 +68,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@ -106,6 +107,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
@Autowired
private IIdHelperService<JpaPid> myIdHelperService;
@SuppressWarnings("rawtypes")
@Autowired
private IMdmLinkDao myMdmLinkDao;
@ -145,7 +147,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
});
}
private LinkedHashSet<JpaPid> getPidsForPatientStyleExport(ExportPIDIteratorParameters theParams, String resourceType, String theJobId, String theChunkId, RuntimeResourceDefinition def) {
@SuppressWarnings("unchecked")
private LinkedHashSet<JpaPid> getPidsForPatientStyleExport(ExportPIDIteratorParameters theParams, String resourceType, String theJobId, String theChunkId, RuntimeResourceDefinition def) throws IOException {
LinkedHashSet<JpaPid> pids = new LinkedHashSet<>();
// Patient
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) {
@ -170,14 +173,15 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
Logs.getBatchTroubleshootingLog().debug("Executing query for bulk export job[{}] chunk[{}]: {}", theJobId, theChunkId, map.toNormalizedQueryString(myContext));
IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map, searchRuntime, new SystemRequestDetails(), theParams.getPartitionIdOrAllPartitions());
int pidCount = 0;
while (resultIterator.hasNext()) {
if (pidCount % 10000 == 0) {
Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount);
try (IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map, searchRuntime, new SystemRequestDetails(), theParams.getPartitionIdOrAllPartitions())) {
int pidCount = 0;
while (resultIterator.hasNext()) {
if (pidCount % 10000 == 0) {
Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount);
}
pidCount++;
pids.add(resultIterator.next());
}
pidCount++;
pids.add(resultIterator.next());
}
}
}
@ -209,7 +213,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
return referenceOrListParam;
}
private LinkedHashSet<JpaPid> getPidsForSystemStyleExport(ExportPIDIteratorParameters theParams, String theJobId, String theChunkId, RuntimeResourceDefinition theDef) {
@SuppressWarnings("unchecked")
private LinkedHashSet<JpaPid> getPidsForSystemStyleExport(ExportPIDIteratorParameters theParams, String theJobId, String theChunkId, RuntimeResourceDefinition theDef) throws IOException {
LinkedHashSet<JpaPid> pids = new LinkedHashSet<>();
// System
List<SearchParameterMap> maps = myBulkExportHelperSvc.createSearchParameterMapsForResourceType(theDef, theParams, true);
@ -219,23 +224,24 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
Logs.getBatchTroubleshootingLog().debug("Executing query for bulk export job[{}] chunk[{}]: {}", theJobId, theChunkId, map.toNormalizedQueryString(myContext));
// requires a transaction
IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map,
try (IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map,
new SearchRuntimeDetails(null, theJobId),
null,
theParams.getPartitionIdOrAllPartitions());
int pidCount = 0;
while (resultIterator.hasNext()) {
if (pidCount % 10000 == 0) {
Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount);
theParams.getPartitionIdOrAllPartitions())) {
int pidCount = 0;
while (resultIterator.hasNext()) {
if (pidCount % 10000 == 0) {
Logs.getBatchTroubleshootingLog().debug("Bulk export job[{}] chunk[{}] has loaded {} pids", theJobId, theChunkId, pidCount);
}
pidCount++;
pids.add(resultIterator.next());
}
pidCount++;
pids.add(resultIterator.next());
}
}
return pids;
}
private LinkedHashSet<JpaPid> getPidsForGroupStyleExport(ExportPIDIteratorParameters theParams, String theResourceType, RuntimeResourceDefinition theDef) {
private LinkedHashSet<JpaPid> getPidsForGroupStyleExport(ExportPIDIteratorParameters theParams, String theResourceType, RuntimeResourceDefinition theDef) throws IOException {
LinkedHashSet<JpaPid> pids;
if (theResourceType.equalsIgnoreCase("Patient")) {
@ -250,17 +256,28 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
return pids;
}
private LinkedHashSet<JpaPid> getRelatedResourceTypePids(ExportPIDIteratorParameters theParams, RuntimeResourceDefinition theDef) {
private LinkedHashSet<JpaPid> getRelatedResourceTypePids(ExportPIDIteratorParameters theParams, RuntimeResourceDefinition theDef) throws IOException {
LinkedHashSet<JpaPid> pids = new LinkedHashSet<>();
// expand the group pid -> list of patients in that group (list of patient pids)
Set<JpaPid> expandedMemberResourceIds = expandAllPatientPidsFromGroup(theParams);
assert expandedMemberResourceIds != null && !expandedMemberResourceIds.isEmpty();
assert !expandedMemberResourceIds.isEmpty();
Logs.getBatchTroubleshootingLog().debug("{} has been expanded to members:[{}]", theParams.getGroupId(), expandedMemberResourceIds);
//Next, let's search for the target resources, with their correct patient references, chunked.
//The results will be jammed into myReadPids
// for each patient pid ->
// search for the target resources, with their correct patient references, chunked.
// The results will be jammed into myReadPids
QueryChunker<JpaPid> queryChunker = new QueryChunker<>();
queryChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> {
queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, theDef);
try {
queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, theDef);
} catch (IOException ex) {
// we will never see this;
// SearchBuilder#QueryIterator does not (nor can ever) throw
// an IOException... but Java requires the check,
// so we'll put a log here (just in the off chance)
ourLog.error("Couldn't close query iterator ", ex);
throw new RuntimeException(Msg.code(2346) + "Couldn't close query iterator", ex);
}
});
return pids;
}
@ -333,7 +350,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
* In case we are doing a Group Bulk Export and resourceType `Patient` is requested, we can just return the group members,
* possibly expanded by MDM, and don't have to go and fetch other resource DAOs.
*/
private LinkedHashSet<JpaPid> getExpandedPatientList(ExportPIDIteratorParameters theParameters) {
@SuppressWarnings("unchecked")
private LinkedHashSet<JpaPid> getExpandedPatientList(ExportPIDIteratorParameters theParameters) throws IOException {
List<JpaPid> members = getMembersFromGroupWithFilter(theParameters, true);
List<IIdType> ids = members.stream().map(member -> new IdDt("Patient/" + member)).collect(Collectors.toList());
ourLog.info("While extracting patients from a group, we found {} patients.", ids.size());
@ -362,7 +380,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
*
* @return A list of strings representing the Patient IDs of the members (e.g. ["P1", "P2", "P3"]
*/
private List<JpaPid> getMembersFromGroupWithFilter(ExportPIDIteratorParameters theParameters, boolean theConsiderSince) {
@SuppressWarnings("unchecked")
private List<JpaPid> getMembersFromGroupWithFilter(ExportPIDIteratorParameters theParameters, boolean theConsiderSince) throws IOException {
RuntimeResourceDefinition def = myContext.getResourceDefinition("Patient");
List<JpaPid> resPids = new ArrayList<>();
@ -373,13 +392,14 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
for (SearchParameterMap map : maps) {
ISearchBuilder<JpaPid> searchBuilder = getSearchBuilderForResourceType("Patient");
ourLog.debug("Searching for members of group {} with job instance {} with map {}", theParameters.getGroupId(), theParameters.getInstanceId(), map);
IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map,
try (IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(map,
new SearchRuntimeDetails(null, theParameters.getInstanceId()),
null,
theParameters.getPartitionIdOrAllPartitions());
theParameters.getPartitionIdOrAllPartitions())) {
while (resultIterator.hasNext()) {
resPids.add(resultIterator.next());
while (resultIterator.hasNext()) {
resPids.add(resultIterator.next());
}
}
}
return resPids;
@ -401,6 +421,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
/**
* @param thePidTuples
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private void populateMdmResourceCache(List<MdmPidTuple<JpaPid>> thePidTuples) {
if (myMdmExpansionCacheSvc.hasBeenPopulated()) {
return;
@ -443,14 +464,16 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
}
}
// gets all the resources related to each patient provided in the list of thePatientPids
@SuppressWarnings("unchecked")
private void queryResourceTypeWithReferencesToPatients(Set<JpaPid> theReadPids,
List<JpaPid> JpaPidChunk,
List<JpaPid> thePatientPids,
ExportPIDIteratorParameters theParams,
RuntimeResourceDefinition theDef) {
RuntimeResourceDefinition theDef) throws IOException {
//Convert Resource Persistent IDs to actual client IDs.
Set<JpaPid> pidSet = new HashSet<>(JpaPidChunk);
Set<String> resourceIds = myIdHelperService.translatePidsToFhirResourceIds(pidSet);
Set<JpaPid> pidSet = new HashSet<>(thePatientPids);
Set<String> patientIds = myIdHelperService.translatePidsToFhirResourceIds(pidSet);
//Build SP map
//First, inject the _typeFilters and _since from the export job
@ -461,29 +484,49 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
validateSearchParametersForGroup(expandedSpMap, theParams.getResourceType());
// Fetch and cache a search builder for this resource type
// filter by ResourceType
ISearchBuilder<JpaPid> searchBuilder = getSearchBuilderForResourceType(theParams.getResourceType());
// Now, further filter the query with patient references defined by the chunk of IDs we have.
// filter by PatientIds
if (PATIENT_BULK_EXPORT_FORWARD_REFERENCE_RESOURCE_TYPES.contains(theParams.getResourceType())) {
filterSearchByHasParam(resourceIds, expandedSpMap, theParams);
filterSearchByHasParam(patientIds, expandedSpMap, theParams);
} else {
filterSearchByResourceIds(resourceIds, expandedSpMap, theParams);
filterSearchByResourceIds(patientIds, expandedSpMap, theParams);
}
//Execute query and all found pids to our local iterator.
RequestPartitionId partitionId = theParams.getPartitionIdOrAllPartitions();
IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(expandedSpMap,
try (IResultIterator<JpaPid> resultIterator = searchBuilder.createQuery(expandedSpMap,
new SearchRuntimeDetails(null, theParams.getInstanceId()),
null,
partitionId);
while (resultIterator.hasNext()) {
theReadPids.add(resultIterator.next());
partitionId)) {
while (resultIterator.hasNext()) {
theReadPids.add(resultIterator.next());
}
}
// Construct our Includes filter
// We use this to recursively fetch resources of interest
// (but should only request those the user has requested/can see)
Set<Include> includes = new HashSet<>();
for (String resourceType : theParams.getRequestedResourceTypes()) {
includes.add(new Include(resourceType + ":*", true));
}
// add _include to results to support ONC
Set<Include> includes = Collections.singleton(new Include("*", true));
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
Set<JpaPid> includeIds = searchBuilder.loadIncludes(myContext, myEntityManager, theReadPids, includes, false, expandedSpMap.getLastUpdated(), theParams.getInstanceId(), requestDetails, null);
SearchBuilderLoadIncludesParameters<JpaPid> loadIncludesParameters = new SearchBuilderLoadIncludesParameters<>();
loadIncludesParameters.setFhirContext(myContext);
loadIncludesParameters.setMatches(theReadPids);
loadIncludesParameters.setEntityManager(myEntityManager);
loadIncludesParameters.setRequestDetails(requestDetails);
loadIncludesParameters.setIncludeFilters(includes);
loadIncludesParameters.setReverseMode(false);
loadIncludesParameters.setLastUpdated(expandedSpMap.getLastUpdated());
loadIncludesParameters.setSearchIdOrDescription(theParams.getInstanceId());
loadIncludesParameters.setDesiredResourceTypes(theParams.getRequestedResourceTypes());
Set<JpaPid> includeIds = searchBuilder.loadIncludes(loadIncludesParameters);
// gets rid of the Patient duplicates
theReadPids.addAll(includeIds.stream().filter((id) -> !id.getResourceType().equals("Patient")).collect(Collectors.toSet()));
}
@ -530,7 +573,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
*
* @return a Set of Strings representing the resource IDs of all members of a group.
*/
private Set<JpaPid> expandAllPatientPidsFromGroup(ExportPIDIteratorParameters theParams) {
private Set<JpaPid> expandAllPatientPidsFromGroup(ExportPIDIteratorParameters theParams) throws IOException {
Set<JpaPid> expandedIds = new HashSet<>();
RequestPartitionId partitionId = theParams.getPartitionIdOrAllPartitions();
SystemRequestDetails requestDetails = new SystemRequestDetails().setRequestPartitionId(partitionId);
@ -551,6 +594,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
return expandedIds;
}
@SuppressWarnings({"rawtypes", "unchecked"})
private Set<JpaPid> performMembershipExpansionViaMdmTable(JpaPid pidOrNull) {
List<MdmPidTuple<JpaPid>> goldenPidTargetPidTuples = myMdmLinkDao.expandPidsFromGroupPidGivenMatchResult(pidOrNull, MdmMatchResultEnum.MATCH);
//Now lets translate these pids into resource IDs

View File

@ -1336,10 +1336,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
provenance.setResourceTable(theEntity);
provenance.setPartitionId(theEntity.getPartitionId());
if (haveRequestId) {
provenance.setRequestId(left(requestId, Constants.REQUEST_ID_LENGTH));
String persistedRequestId = left(requestId, Constants.REQUEST_ID_LENGTH);
provenance.setRequestId(persistedRequestId);
historyEntry.setRequestId(persistedRequestId);
}
if (haveSource) {
provenance.setSourceUri(source);
String persistedSource = left(source, ResourceHistoryTable.SOURCE_URI_LENGTH);
provenance.setSourceUri(persistedSource);
historyEntry.setSourceUri(persistedSource);
}
if (theResource != null) {
MetaUtil.populateResourceSource(myFhirContext, shouldStoreSource ? source : null, shouldStoreRequestId ? requestId : null , theResource);

View File

@ -423,7 +423,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
boolean resourceIdWasServerAssigned = theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE;
entity.setFhirId(resourceIdBeforeStorage);
if (resourceHadIdBeforeStorage) {
entity.setFhirId(resourceIdBeforeStorage);
}
HookParams hookParams;
@ -1439,8 +1441,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
reindexOptimizeStorageHistoryEntity(entity, historyEntity);
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
int pageSize = 100;
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
Slice<ResourceHistoryTable> historyEntities = myResourceHistoryTableDao.findForResourceIdAndReturnEntities(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
for (int page = 0; ((long)page * pageSize) < entity.getVersion(); page++) {
Slice<ResourceHistoryTable> historyEntities = myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
for (ResourceHistoryTable next : historyEntities) {
reindexOptimizeStorageHistoryEntity(entity, next);
}
@ -1450,16 +1452,30 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) {
boolean changed = false;
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC || historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
byte[] resourceBytes = historyEntity.getResource();
if (resourceBytes != null) {
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
if (myStorageSettings.getInlineResourceTextBelowSize() > 0 && resourceText.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
ourLog.debug("Storing text of resource {} version {} as inline VARCHAR", entity.getResourceId(), historyEntity.getVersion());
myResourceHistoryTableDao.setResourceTextVcForVersion(historyEntity.getId(), resourceText);
historyEntity.setResourceTextVc(resourceText);
historyEntity.setResource(null);
historyEntity.setEncoding(ResourceEncodingEnum.JSON);
changed = true;
}
}
}
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
if (historyEntity.getProvenance() != null) {
historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri());
historyEntity.setRequestId(historyEntity.getProvenance().getRequestId());
changed = true;
}
}
if (changed) {
myResourceHistoryTableDao.save(historyEntity);
}
}
private BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest, RequestPartitionId requestPartitionId) {

View File

@ -26,6 +26,7 @@ import org.hl7.fhir.instance.model.api.IBaseBooleanDatatype;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

View File

@ -64,6 +64,17 @@ public interface IBatch2JobInstanceRepository extends JpaRepository<Batch2JobIns
Pageable thePageable
);
@Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myStatus = :status")
List<Batch2JobInstanceEntity> findInstancesByJobStatus(
@Param("status") StatusEnum theState,
Pageable thePageable
);
@Query("SELECT count(b) from Batch2JobInstanceEntity b WHERE b.myStatus = :status")
Integer findTotalJobsOfStatus(
@Param("status") StatusEnum theState
);
@Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myStatus IN( :stats ) AND b.myEndTime < :cutoff")
List<Batch2JobInstanceEntity> findInstancesByJobIdAndStatusAndExpiry(
@Param("defId") String theDefinitionId,

View File

@ -44,8 +44,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Query("SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion")
Slice<Long> findForResourceId(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion")
Slice<ResourceHistoryTable> findForResourceIdAndReturnEntities(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion != :dontWantVersion")
Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("" +
"SELECT v.myId FROM ResourceHistoryTable v " +
@ -67,13 +67,6 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
"WHERE v.myResourceVersion != t.myVersion")
Slice<Long> findIdsOfPreviousVersionsOfResources(Pageable thePage);
/**
* Sets the inline text and clears the LOB copy of the text
*/
@Modifying
@Query("UPDATE ResourceHistoryTable as t SET t.myResource = null, t.myResourceTextVc = :text WHERE t.myId = :pid")
void setResourceTextVcForVersion(@Param("pid") Long id, @Param("text") String resourceText);
@Modifying
@Query("UPDATE ResourceHistoryTable r SET r.myResourceVersion = :newVersion WHERE r.myResourceId = :id AND r.myResourceVersion = :oldVersion")
void updateVersion(@Param("id") long theId, @Param("oldVersion") long theOldVersion, @Param("newVersion") long theNewVersion);
@ -81,4 +74,5 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Modifying
@Query("DELETE FROM ResourceHistoryTable t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);
}

View File

@ -145,4 +145,7 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long>, I
@Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IN (:partitionIds) AND t.myId = :pid")
Optional<ResourceTable> readByPartitionIds(@Param("partitionIds") Collection<Integer> thrValues, @Param("pid") Long theResourceId);
@Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myId IN :pids")
List<ResourceTable> findAllByIdAndLoadForcedIds(@Param("pids") List<Long> thePids);
}

View File

@ -236,8 +236,11 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
@Override
@Transactional
public void expungeHistoricalVersionsOfIds(RequestDetails theRequestDetails, List<JpaPid> theResourceIds, AtomicInteger theRemainingCount) {
for (JpaPid next : theResourceIds) {
expungeHistoricalVersionsOfId(theRequestDetails, (next).getId(), theRemainingCount);
List<Long> pids = JpaPid.toLongList(theResourceIds);
List<ResourceTable> resourcesToDelete = myResourceTableDao.findAllByIdAndLoadForcedIds(pids);
for (ResourceTable next : resourcesToDelete) {
expungeHistoricalVersionsOfId(theRequestDetails, next, theRemainingCount);
if (expungeLimitReached(theRemainingCount)) {
return;
}
@ -267,12 +270,12 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
deleteAllSearchParams(JpaPid.fromId(resource.getResourceId()));
myResourceTagDao.deleteByResourceId(resource.getId());
if (resource.isHasTags()) {
myResourceTagDao.deleteByResourceId(resource.getId());
}
if (resource.getForcedId() != null) {
ForcedId forcedId = resource.getForcedId();
resource.setForcedId(null);
myResourceTableDao.saveAndFlush(resource);
myForcedIdDao.deleteByPid(forcedId.getId());
}
@ -323,7 +326,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
}
}
private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, Long myResourceId, AtomicInteger theRemainingCount) {
private void expungeHistoricalVersionsOfId(RequestDetails theRequestDetails, ResourceTable theResource, AtomicInteger theRemainingCount) {
Pageable page;
synchronized (theRemainingCount) {
if (expungeLimitReached(theRemainingCount)) {
@ -332,10 +335,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
page = PageRequest.of(0, theRemainingCount.get());
}
ResourceTable resource = myResourceTableDao.findById(myResourceId).orElseThrow(IllegalArgumentException::new);
Slice<Long> versionIds = myResourceHistoryTableDao.findForResourceId(page, resource.getId(), resource.getVersion());
ourLog.debug("Found {} versions of resource {} to expunge", versionIds.getNumberOfElements(), resource.getIdDt().getValue());
Slice<Long> versionIds = myResourceHistoryTableDao.findForResourceId(page, theResource.getId(), theResource.getVersion());
ourLog.debug("Found {} versions of resource {} to expunge", versionIds.getNumberOfElements(), theResource.getIdDt().getValue());
for (Long nextVersionId : versionIds) {
expungeHistoricalVersion(theRequestDetails, nextVersionId, theRemainingCount);
if (expungeLimitReached(theRemainingCount)) {

View File

@ -53,11 +53,11 @@ import java.util.Date;
@Table(name = "MPI_LINK", uniqueConstraints = {
// TODO GGG DROP this index, and instead use the below one
@UniqueConstraint(name = "IDX_EMPI_PERSON_TGT", columnNames = {"PERSON_PID", "TARGET_PID"}),
// v---- this one
//TODO GGG revisit adding this: @UniqueConstraint(name = "IDX_EMPI_GR_TGT", columnNames = {"GOLDEN_RESOURCE_PID", "TARGET_PID"}),
//TODO GGG Should i make individual indices for PERSON/TARGET?
}, indexes = {
@Index(name = "IDX_EMPI_MATCH_TGT_VER", columnList = "MATCH_RESULT, TARGET_PID, VERSION")
@Index(name = "IDX_EMPI_MATCH_TGT_VER", columnList = "MATCH_RESULT, TARGET_PID, VERSION"),
// v---- this one
@Index(name = "IDX_EMPI_GR_TGT", columnList = "GOLDEN_RESOURCE_PID, TARGET_PID")
})
@Audited
// This is the table name generated by default by envers, but we set it explicitly for clarity

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.Constants;
@ -43,6 +44,7 @@ import javax.persistence.TemporalType;
import java.io.Serializable;
import java.util.Date;
@SuppressWarnings("SqlDialectInspection")
@Entity
@Immutable
@Subselect("SELECT h.pid as pid, " +
@ -83,7 +85,7 @@ public class ResourceSearchView implements IBaseResourceEntity, Serializable {
private Long myResourceVersion;
@Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH)
private String myProvenanceRequestId;
@Column(name = "PROV_SOURCE_URI", length = ResourceHistoryProvenanceEntity.SOURCE_URI_LENGTH)
@Column(name = "PROV_SOURCE_URI", length = ResourceHistoryTable.SOURCE_URI_LENGTH)
private String myProvenanceSourceUri;
@Column(name = "HAS_TAGS")
private boolean myHasTags;

View File

@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -44,6 +45,7 @@ import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.VersionEnum;
import software.amazon.awssdk.utils.StringUtils;
import javax.persistence.Index;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@ -109,7 +111,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.unique(false)
.online(true)
.withColumns("RES_ID", "RES_UPDATED", "PARTITION_ID");
Builder.BuilderWithTableName tagDefTable = version.onTable("HFJ_TAG_DEF");
tagDefTable.dropIndex("20230505.1", "IDX_TAGDEF_TYPESYSCODEVERUS");
@ -120,7 +122,41 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.online(false)
.withColumns("TAG_TYPE", "TAG_CODE", "TAG_SYSTEM", "TAG_ID", "TAG_VERSION", "TAG_USER_SELECTED");
// This migration fails on Oracle because an index already exists on column RES_VER_PID, since it is the primary key.
// IDX_RESVERPROV_RESVER_PID is removed in 20230523.1
version
.onTable("HFJ_RES_VER_PROV")
.addIndex("20230510.1", "IDX_RESVERPROV_RESVER_PID")
.unique(false)
.withColumns("RES_VER_PID")
.failureAllowed();
version
.onTable("HFJ_RES_VER_PROV")
.addIndex("20230510.2", "IDX_RESVERPROV_RES_PID")
.unique(false)
.withColumns("RES_PID");
version
.onTable(ResourceHistoryTable.HFJ_RES_VER)
.addColumn("20230510.4", "SOURCE_URI")
.nullable()
.type(ColumnTypeEnum.STRING, 100);
version
.onTable(ResourceHistoryTable.HFJ_RES_VER)
.addColumn("20230510.5", "REQUEST_ID")
.nullable()
.type(ColumnTypeEnum.STRING, 16);
version
.onTable("HFJ_RES_VER_PROV")
.addForeignKey("20230510.6", "FK_RESVERPROV_RES_PID")
.toColumn("RES_PID")
.references("HFJ_RESOURCE", "RES_ID");
version
.onTable("HFJ_RES_VER_PROV")
.dropIndex("20230523.1", "IDX_RESVERPROV_RESVER_PID");
}
@ -355,6 +391,14 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
linkTable.addForeignKey("20230424.5", "FK_RESLINK_TARGET")
.toColumn("TARGET_RESOURCE_ID").references("HFJ_RESOURCE", "RES_ID");
}
{
version.onTable("MPI_LINK")
.addIndex("20230504.1", "IDX_EMPI_GR_TGT")
.unique(false)
.withColumns("GOLDEN_RESOURCE_PID", "TARGET_PID");
}
}
protected void init640() {
@ -903,11 +947,11 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
.unique(false)
.withColumns("RES_ID");
theVersion.onTable("HFJ_RES_VER_PROV")
.addIndex("20211210.3", "FK_RESVERPROV_RES_PID")
.unique(false)
.withColumns("RES_PID")
.doNothing() // This index is added below in a better form
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
theVersion.onTable("HFJ_FORCED_ID")
@ -1526,7 +1570,8 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
resVerProv
.addForeignKey("20190921.15", "FK_RESVERPROV_RES_PID")
.toColumn("RES_PID")
.references("HFJ_RESOURCE", "RES_ID");
.references("HFJ_RESOURCE", "RES_ID")
.doNothing(); // Added below in a better form
resVerProv.addColumn("SOURCE_URI").nullable().type(ColumnTypeEnum.STRING, 100);
resVerProv.addColumn("REQUEST_ID").nullable().type(ColumnTypeEnum.STRING, 16);
resVerProv.addIndex("20190921.16", "IDX_RESVERPROV_SOURCEURI").unique(false).withColumns("SOURCE_URI");

View File

@ -48,8 +48,10 @@ import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchConstants;
@ -79,6 +81,7 @@ import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
@ -95,6 +98,7 @@ import com.healthmarketscience.sqlbuilder.Condition;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
@ -105,6 +109,7 @@ import org.springframework.jdbc.core.SingleColumnRowMapper;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
@ -126,7 +131,6 @@ import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -1108,26 +1112,62 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* The JpaPid returned will have resource type populated.
*/
@Override
public Set<JpaPid> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<JpaPid> theMatches, Collection<Include> theIncludes,
boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount) {
if (theMatches.size() == 0) {
public Set<JpaPid> loadIncludes(
FhirContext theContext,
EntityManager theEntityManager,
Collection<JpaPid> theMatches,
Collection<Include> theIncludes,
boolean theReverseMode,
DateRangeParam theLastUpdated,
String theSearchIdOrDescription,
RequestDetails theRequest,
Integer theMaxCount
) {
SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>();
parameters.setFhirContext(theContext);
parameters.setEntityManager(theEntityManager);
parameters.setMatches(theMatches);
parameters.setIncludeFilters(theIncludes);
parameters.setReverseMode(theReverseMode);
parameters.setLastUpdated(theLastUpdated);
parameters.setSearchIdOrDescription(theSearchIdOrDescription);
parameters.setRequestDetails(theRequest);
parameters.setMaxCount(theMaxCount);
return loadIncludes(parameters);
}
@Override
public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) {
Collection<JpaPid> matches = theParameters.getMatches();
Collection<Include> currentIncludes = theParameters.getIncludeFilters();
boolean reverseMode = theParameters.isReverseMode();
EntityManager entityManager = theParameters.getEntityManager();
Integer maxCount = theParameters.getMaxCount();
FhirContext fhirContext = theParameters.getFhirContext();
DateRangeParam lastUpdated = theParameters.getLastUpdated();
RequestDetails request = theParameters.getRequestDetails();
String searchIdOrDescription = theParameters.getSearchIdOrDescription();
List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes();
boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty();
if (matches.size() == 0) {
return new HashSet<>();
}
if (theIncludes == null || theIncludes.isEmpty()) {
if (currentIncludes == null || currentIncludes.isEmpty()) {
return new HashSet<>();
}
String searchPidFieldName = theReverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID;
String findPidFieldName = theReverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID;
String findResourceTypeFieldName = theReverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE;
String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID;
String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID;
String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE;
String findVersionFieldName = null;
if (!theReverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) {
if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) {
findVersionFieldName = MY_TARGET_RESOURCE_VERSION;
}
List<JpaPid> nextRoundMatches = new ArrayList<>(theMatches);
List<JpaPid> nextRoundMatches = new ArrayList<>(matches);
HashSet<JpaPid> allAdded = new HashSet<>();
HashSet<JpaPid> original = new HashSet<>(theMatches);
ArrayList<Include> includes = new ArrayList<>(theIncludes);
HashSet<JpaPid> original = new HashSet<>(matches);
ArrayList<Include> includes = new ArrayList<>(currentIncludes);
int roundCounts = 0;
StopWatch w = new StopWatch();
@ -1161,42 +1201,62 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
sqlBuilder.append("SELECT r.").append(findPidFieldName);
sqlBuilder.append(", r.").append(findResourceTypeFieldName);
if (findVersionFieldName != null) {
sqlBuilder.append(", r." + findVersionFieldName);
sqlBuilder.append(", r.").append(findVersionFieldName);
}
sqlBuilder.append(" FROM ResourceLink r WHERE ");
sqlBuilder.append("r.");
sqlBuilder.append(searchPidFieldName);
sqlBuilder.append(searchPidFieldName); // (rev mode) target_resource_id | source_resource_id
sqlBuilder.append(" IN (:target_pids)");
// Technically if the request is a qualified star (e.g. _include=Observation:*) we
// should always be checking the source resource type on the resource link. We don't
// actually index that column though by default, so in order to try and be efficient
// we don't actually include it for includes (but we do for revincludes). This is
// because for an include it doesn't really make sense to include a different
// resource type than the one you are searching on.
if (wantResourceType != null && theReverseMode) {
/*
* We need to set the resource type in 2 cases only:
* 1) we are in $everything mode
* (where we only want to fetch specific resource types, regardless of what is
* available to fetch)
* 2) we are doing revincludes
*
* Technically if the request is a qualified star (e.g. _include=Observation:*) we
* should always be checking the source resource type on the resource link. We don't
* actually index that column though by default, so in order to try and be efficient
* we don't actually include it for includes (but we do for revincludes). This is
* because for an include, it doesn't really make sense to include a different
* resource type than the one you are searching on.
*/
if (wantResourceType != null
&& (reverseMode || (myParams != null && myParams.getEverythingMode() != null))
) {
// because mySourceResourceType is not part of the HFJ_RES_LINK
// index, this may not perform optimally, but it only applies to
// $everything operations (and maybe we should update the index)
sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type");
} else {
wantResourceType = null;
}
// When calling $everything on a Patient instance, we don't want to recurse into new Patient resources
// (e.g. via Provenance, List, or Group)
if (myParams != null && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) {
sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'");
sqlBuilder.append(" AND r.mySourceResourceType != 'Provenance'");
}
if (hasDesiredResourceTypes) {
sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)");
}
String sql = sqlBuilder.toString();
List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize());
for (Collection<JpaPid> nextPartition : partitions) {
TypedQuery<?> q = theEntityManager.createQuery(sql, Object[].class);
TypedQuery<?> q = entityManager.createQuery(sql, Object[].class);
q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
if (wantResourceType != null) {
q.setParameter("want_resource_type", wantResourceType);
}
if (theMaxCount != null) {
q.setMaxResults(theMaxCount);
if (maxCount != null) {
q.setMaxResults(maxCount);
}
if (hasDesiredResourceTypes) {
q.setParameter("desired_target_resource_types", String.join(", ", desiredResourceTypes));
}
List<?> results = q.getResultList();
for (Object nextRow : results) {
@ -1220,7 +1280,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
} else {
List<String> paths;
// Start replace
@ -1229,7 +1288,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (isBlank(resType)) {
continue;
}
RuntimeResourceDefinition def = theContext.getResourceDefinition(resType);
RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType);
if (def == null) {
ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue());
continue;
@ -1249,77 +1308,58 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
paths = param.getPathsSplitForResourceType(resType);
// end replace
String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param);
for (String nextPath : paths) {
boolean haveTargetTypesDefinedByParam = param.hasTargets();
String findPidFieldSqlColumn = findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id";
String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS;
if (findVersionFieldName != null) {
fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS;
}
// Query for includes lookup has consider 2 cases
// Query for includes lookup has 2 cases
// Case 1: Where target_resource_id is available in hfj_res_link table for local references
// Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical url in target_resource_url
// Case 1:
Map<String, Object> localReferenceQueryParams = new HashMap<>();
String searchPidFieldSqlColumn = searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id";
StringBuilder resourceIdBasedQuery = new StringBuilder("SELECT " + fieldsToLoad +
StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad +
" FROM hfj_res_link r " +
" WHERE r.src_path = :src_path AND " +
" r.target_resource_id IS NOT NULL AND " +
" r." + searchPidFieldSqlColumn + " IN (:target_pids) ");
if (targetResourceType != null) {
resourceIdBasedQuery.append(" AND r.target_resource_type = :target_resource_type ");
} else if (haveTargetTypesDefinedByParam) {
resourceIdBasedQuery.append(" AND r.target_resource_type in (:target_resource_types) ");
localReferenceQueryParams.put("src_path", nextPath);
// we loop over target_pids later.
if (targetResourceTypes != null) {
if (targetResourceTypes.size() == 1) {
localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type ");
localReferenceQueryParams.put("target_resource_type", targetResourceTypes.iterator().next());
} else {
localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) ");
localReferenceQueryParams.put("target_resource_types", targetResourceTypes);
}
}
// Case 2:
String fieldsToLoadFromSpidxUriTable = "rUri.res_id";
// to match the fields loaded in union
if (fieldsToLoad.split(",").length > 1) {
for (int i = 0; i < fieldsToLoad.split(",").length - 1; i++) {
fieldsToLoadFromSpidxUriTable += ", NULL";
}
}
//@formatter:off
StringBuilder resourceUrlBasedQuery = new StringBuilder("SELECT " + fieldsToLoadFromSpidxUriTable +
" FROM hfj_res_link r " +
" JOIN hfj_spidx_uri rUri ON ( " +
" r.target_resource_url = rUri.sp_uri AND " +
" rUri.sp_name = 'url' ");
Pair<String, Map<String, Object>> canonicalQuery = buildCanonicalUrlQuery(findVersionFieldName, searchPidFieldSqlColumn, targetResourceTypes);
if (targetResourceType != null) {
resourceUrlBasedQuery.append(" AND rUri.res_type = :target_resource_type ");
} else if (haveTargetTypesDefinedByParam) {
resourceUrlBasedQuery.append(" AND rUri.res_type IN (:target_resource_types) ");
}
resourceUrlBasedQuery.append(" ) ");
resourceUrlBasedQuery.append(
" WHERE r.src_path = :src_path AND " +
" r.target_resource_id IS NULL AND " +
" r." + searchPidFieldSqlColumn + " IN (:target_pids) ");
//@formatter:on
String sql = resourceIdBasedQuery + " UNION " + resourceUrlBasedQuery;
String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft();
List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize());
for (Collection<JpaPid> nextPartition : partitions) {
Query q = theEntityManager.createNativeQuery(sql, Tuple.class);
q.setParameter("src_path", nextPath);
Query q = entityManager.createNativeQuery(sql, Tuple.class);
q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
if (targetResourceType != null) {
q.setParameter("target_resource_type", targetResourceType);
} else if (haveTargetTypesDefinedByParam) {
q.setParameter("target_resource_types", param.getTargets());
}
localReferenceQueryParams.forEach(q::setParameter);
canonicalQuery.getRight().forEach(q::setParameter);
if (theMaxCount != null) {
q.setMaxResults(theMaxCount);
if (maxCount != null) {
q.setMaxResults(maxCount);
}
@SuppressWarnings("unchecked")
List<Tuple> results = q.getResultList();
for (Tuple result : results) {
if (result != null) {
@ -1336,44 +1376,38 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
if (theReverseMode) {
if (theLastUpdated != null && (theLastUpdated.getLowerBoundAsInstant() != null || theLastUpdated.getUpperBoundAsInstant() != null)) {
pidsToInclude = new HashSet<>(QueryParameterUtils.filterResourceIdsByLastUpdated(theEntityManager, theLastUpdated, pidsToInclude));
}
}
nextRoundMatches.clear();
for (JpaPid next : pidsToInclude) {
if (original.contains(next) == false && allAdded.contains(next) == false) {
if (!original.contains(next) && !allAdded.contains(next)) {
nextRoundMatches.add(next);
}
}
addedSomeThisRound = allAdded.addAll(pidsToInclude);
if (theMaxCount != null && allAdded.size() >= theMaxCount) {
if (maxCount != null && allAdded.size() >= maxCount) {
break;
}
} while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);
} while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound);
allAdded.removeAll(original);
ourLog.info("Loaded {} {} in {} rounds and {} ms for search {}", allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart(), theSearchIdOrDescription);
ourLog.info("Loaded {} {} in {} rounds and {} ms for search {}", allAdded.size(), reverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart(), searchIdOrDescription);
// Interceptor call: STORAGE_PREACCESS_RESOURCES
// This can be used to remove results from the search result details before
// the user has a chance to know that they were in the results
if (allAdded.size() > 0) {
if (!allAdded.isEmpty()) {
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, theRequest)) {
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, request)) {
List<JpaPid> includedPidList = new ArrayList<>(allAdded);
JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(includedPidList, () -> this);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
.add(RequestDetails.class, request)
.addIfMatchesType(ServletRequestDetails.class, request);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, request, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
for (int i = includedPidList.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {
@ -1389,6 +1423,62 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return allAdded;
}
@Nullable
private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
boolean haveTargetTypesDefinedByParam = param.hasTargets();
Set<String> targetResourceTypes;
if (targetResourceType != null) {
targetResourceTypes = Set.of(targetResourceType);
} else if (haveTargetTypesDefinedByParam) {
targetResourceTypes = param.getTargets();
} else {
// all types!
targetResourceTypes = null;
}
return targetResourceTypes;
}
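For concreteness, the precedence this helper encodes (illustrative values, not from this commit):

_include=Observation:subject:Patient  ->  {"Patient"}          (explicit :targetType qualifier wins)
_include=Observation:subject          ->  param.getTargets()   (types declared on the search parameter)
_include=Observation:*                ->  null                 (wildcard: match all target types)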
@Nonnull
private Pair<String, Map<String, Object>> buildCanonicalUrlQuery(String theVersionFieldName, String thePidFieldSqlColumn, Set<String> theTargetResourceTypes) {
String fieldsToLoadFromSpidxUriTable = "rUri.res_id";
if (theVersionFieldName != null) {
// canonical-uri references aren't versioned, but we need to match the column count for the UNION
fieldsToLoadFromSpidxUriTable += ", NULL";
}
// The logical join will be by hfj_spidx_uri on sp_name='url' and sp_uri=target_resource_url.
// But sp_name isn't indexed, so we use hash_identity instead.
if (theTargetResourceTypes == null) {
// hash_identity includes the resource type. So a null wildcard must be replaced with a list of all types.
theTargetResourceTypes = myDaoRegistry.getRegisteredDaoTypes();
}
assert !theTargetResourceTypes.isEmpty();
Set<Long> identityHashesForTypes = theTargetResourceTypes.stream()
.map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity(myPartitionSettings, myRequestPartitionId, type, "url"))
.collect(Collectors.toSet());
Map<String, Object> canonicalUriQueryParams = new HashMap<>();
StringBuilder canonicalUrlQuery = new StringBuilder(
"SELECT " + fieldsToLoadFromSpidxUriTable +
" FROM hfj_res_link r " +
" JOIN hfj_spidx_uri rUri ON ( ");
// join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2
if (theTargetResourceTypes.size() == 1) {
canonicalUrlQuery.append(" rUri.hash_identity = :uri_identity_hash ");
canonicalUriQueryParams.put("uri_identity_hash", identityHashesForTypes.iterator().next());
} else {
canonicalUrlQuery.append(" rUri.hash_identity in (:uri_identity_hashes) ");
canonicalUriQueryParams.put("uri_identity_hashes", identityHashesForTypes);
}
canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri )" +
" WHERE r.src_path = :src_path AND " +
" r.target_resource_id IS NULL AND " +
" r." + thePidFieldSqlColumn + " IN (:target_pids) ");
return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams);
}
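Why hash_identity rather than sp_name: a conceptual sketch only (the real hash is BaseResourceIndexedSearchParam.calculateHashIdentity, which also folds in partition settings):

class HashIdentitySketch {
    // Conceptual placeholder, not HAPI's actual hash function:
    // hash_identity collapses (resourceType, paramName) into one indexed long,
    // so "find 'url' params across N resource types" becomes an IN over N
    // precomputed hashes instead of a filter on the unindexed sp_name column.
    static long hashIdentity(String theResourceType, String theParamName) {
        return java.util.Objects.hash(theResourceType, theParamName);
    }
}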
private List<Collection<JpaPid>> partition(Collection<JpaPid> theNextRoundMatches, int theMaxLoad) {
if (theNextRoundMatches.size() <= theMaxLoad) {
return Collections.singletonList(theNextRoundMatches);
@ -1557,6 +1647,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
return myResourceName;
}
/**
* IncludesIterator, used to recursively fetch resources from the provided list of PIDs
*/
public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> {
private final RequestDetails myRequest;
@ -1574,7 +1667,23 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
while (myNext == null) {
if (myCurrentIterator == null) {
Set<Include> includes = Collections.singleton(new Include("*", true));
Set<Include> includes = new HashSet<>();
if (myParams.containsKey(Constants.PARAM_TYPE)) {
for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) {
for (IQueryParameterType type : typeList) {
String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext));
for (String resourceType : queryString.split(",")) {
String rt = resourceType.trim();
if (isNotBlank(rt)) {
includes.add(new Include(rt + ":*", true));
}
}
}
}
}
if (includes.isEmpty()) {
includes.add(new Include("*", true));
}
Set<JpaPid> newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest, null);
myCurrentIterator = newPids.iterator();
}
@ -1604,6 +1713,9 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
/**
* Basic Query iterator, used to fetch the results of a query.
*/
private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> {
private final SearchRuntimeDetails mySearchRuntimeDetails;
@ -1627,8 +1739,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myOffset = myParams.getOffset();
myRequest = theRequest;
// Includes are processed inline for $everything query when we don't have a '_type' specified
if (myParams.getEverythingMode() != null && !myParams.containsKey(Constants.PARAM_TYPE)) {
// $everything requires recursively fetching all related resources
if (myParams.getEverythingMode() != null) {
myFetchIncludesForEverythingOperation = true;
}
@ -1638,7 +1750,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
private void fetchNext() {
try {
if (myHaveRawSqlHooks) {
CurrentThreadCaptureQueriesListener.startCapturing();
@ -1656,6 +1767,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
// assigns the results iterator
initializeIteratorQuery(myOffset, myMaxResultsToFetch);
if (myAlsoIncludePids == null) {
@ -1663,9 +1775,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
}
if (myNext == null) {
for (Iterator<JpaPid> myPreResultsIterator = myAlsoIncludePids.iterator(); myPreResultsIterator.hasNext(); ) {
JpaPid next = myPreResultsIterator.next();
if (next != null)
@ -1724,6 +1835,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}
if (myNext == null) {
// if we got here, it means the current JpaPid has already been processed
// and we will decide (here) if we need to fetch related resources recursively
if (myFetchIncludesForEverythingOperation) {
myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
myFetchIncludesForEverythingOperation = false;
@ -1750,6 +1863,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
} finally {
// search finished - fire hooks
if (myHaveRawSqlHooks) {
SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
HookParams params = new HookParams()

View File

@ -40,6 +40,7 @@ import javax.persistence.PersistenceContextType;
import javax.persistence.Query;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Arrays;
public class SearchQueryExecutor implements ISearchQueryExecutor {
@ -119,7 +120,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor {
hibernateQuery.setParameter(i, args[i - 1]);
}
ourLog.trace("About to execute SQL: {}", sql);
ourLog.trace("About to execute SQL: {}. Parameters: {}", sql, Arrays.toString(args));
/*
* These settings help to ensure that we use a search cursor

View File

@ -2200,6 +2200,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
public IValidationSupport.CodeValidationResult validateCode(@Nonnull ValidationSupportContext theValidationSupportContext, @Nonnull ConceptValidationOptions theOptions, String theCodeSystemUrl, String theCode, String theDisplay, String theValueSetUrl) {
//TODO GGG TRY TO JUST AUTO_PASS HERE AND SEE WHAT HAPPENS.
invokeRunnableForUnitTest();
theOptions.setValidateDisplay(isNotBlank(theDisplay));
if (isNotBlank(theValueSetUrl)) {
return validateCodeInValueSet(theValidationSupportContext, theOptions, theValueSetUrl, theCodeSystemUrl, theCode, theDisplay);

View File

@ -0,0 +1,34 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.EmailDetails;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LoggingEmailSender implements IEmailSender {
private static final Logger ourLog = LoggerFactory.getLogger(LoggingEmailSender.class);
@Override
public void send(EmailDetails theDetails) {
ourLog.info("Not sending subscription email to: {}", theDetails.getTo());
}
}
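A hypothetical wiring sketch (not part of this commit) for the new class, assuming a Spring @Bean fallback when no SMTP transport is configured:

import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.util.LoggingEmailSender;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class EmailSenderConfigSketch {
    @Bean
    IEmailSender emailSender() {
        // assumption: log-and-drop delivery when no real sender is available
        return new LoggingEmailSender();
    }
}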

View File

@ -198,25 +198,6 @@ public class QueryParameterUtils {
return lastUpdatedPredicates;
}
public static List<JpaPid> filterResourceIdsByLastUpdated(EntityManager theEntityManager, final DateRangeParam theLastUpdated, Collection<JpaPid> thePids) {
if (thePids.isEmpty()) {
return Collections.emptyList();
}
CriteriaBuilder builder = theEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceTable> from = cq.from(ResourceTable.class);
cq.select(from.get("myId").as(Long.class));
List<Predicate> lastUpdatedPredicates = createLastUpdatedPredicates(theLastUpdated, builder, from);
List<Long> longIds = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());
lastUpdatedPredicates.add(from.get("myId").as(Long.class).in(longIds));
cq.where(toPredicateArray(lastUpdatedPredicates));
TypedQuery<Long> query = theEntityManager.createQuery(cq);
return query.getResultList().stream().map(JpaPid::fromId).collect(Collectors.toList());
}
public static void verifySearchHasntFailedOrThrowInternalErrorException(Search theSearch) {
if (theSearch.getStatus() == SearchStatusEnum.FAILED) {
Integer status = theSearch.getFailureCode();

View File

@ -16,6 +16,7 @@ import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
@ -25,8 +26,6 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Group;
@ -43,7 +42,6 @@ import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -57,6 +55,7 @@ import java.util.Optional;
import java.util.Set;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ -67,11 +66,10 @@ import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.hamcrest.Matchers.containsString;
@ExtendWith(MockitoExtension.class)
public class JpaBulkExportProcessorTest {
@ -366,8 +364,10 @@ public class JpaBulkExportProcessorTest {
}
// source is: "isExpandMdm", "isPartitioned" (whether or not to test on a specific partition)
@ParameterizedTest
@CsvSource({"false, false", "false, true", "true, true", "true, false"})
@SuppressWarnings({"rawtypes", "unchecked"})
public void getResourcePidIterator_groupExportStyleWithNonPatientResource_returnsIterator(boolean theMdm, boolean thePartitioned) {
// setup
ExportPIDIteratorParameters parameters = createExportParameters(BulkDataExportOptions.ExportStyle.GROUP);
@ -436,8 +436,9 @@ public class JpaBulkExportProcessorTest {
.thenReturn(observationDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(observationDao), eq("Observation"), eq(Observation.class)))
.thenReturn(observationSearchBuilder);
when(observationSearchBuilder.loadIncludes(any(), any(), eq(observationPidSet), any(), eq(false), any(), any(),
any(SystemRequestDetails.class), any()))
when(observationSearchBuilder.loadIncludes(
any(SearchBuilderLoadIncludesParameters.class)
))
.thenReturn(new HashSet<>());
// ret
@ -471,10 +472,12 @@ public class JpaBulkExportProcessorTest {
ArgumentCaptor<SystemRequestDetails> groupDaoReadSystemRequestDetailsCaptor = ArgumentCaptor.forClass(SystemRequestDetails.class);
verify(groupDao).read(any(IIdType.class), groupDaoReadSystemRequestDetailsCaptor.capture());
validatePartitionId(thePartitioned, groupDaoReadSystemRequestDetailsCaptor.getValue().getRequestPartitionId());
ArgumentCaptor<SystemRequestDetails> searchBuilderLoadIncludesRequestDetailsCaptor = ArgumentCaptor.forClass(SystemRequestDetails.class);
verify(observationSearchBuilder).loadIncludes(any(), any(), eq(observationPidSet), any(), eq(false), any(), any(),
searchBuilderLoadIncludesRequestDetailsCaptor.capture(), any());
validatePartitionId(thePartitioned, searchBuilderLoadIncludesRequestDetailsCaptor.getValue().getRequestPartitionId());
ArgumentCaptor<SearchBuilderLoadIncludesParameters> searchBuilderLoadIncludesRequestDetailsCaptor = ArgumentCaptor.forClass(SearchBuilderLoadIncludesParameters.class);
verify(observationSearchBuilder).loadIncludes(searchBuilderLoadIncludesRequestDetailsCaptor.capture());
SearchBuilderLoadIncludesParameters param = searchBuilderLoadIncludesRequestDetailsCaptor.getValue();
assertTrue(param.getRequestDetails() instanceof SystemRequestDetails);
SystemRequestDetails details = (SystemRequestDetails) param.getRequestDetails();
validatePartitionId(thePartitioned, details.getRequestPartitionId());
}
@ParameterizedTest

View File

@ -949,12 +949,6 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
}
}
private Consumer<IBaseResource>[] asArray(Consumer<IBaseResource> theIBaseResourceConsumer) {
@SuppressWarnings("unchecked")
Consumer<IBaseResource>[] array = (Consumer<IBaseResource>[]) new Consumer[]{theIBaseResourceConsumer};
return array;
}
private List<String> getResultIds(IBundleProvider theResult) {
return theResult.getAllResources().stream().map(r -> r.getIdElement().getIdPart()).collect(Collectors.toList());
}
@ -1005,7 +999,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
@Test
void secondWordFound() {
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Cloudy, yellow"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Cloudy, yellow"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=yellow");
assertThat(resourceIds, hasItem(id1));
@ -1016,9 +1010,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// smit - matches "smit" and "smith"
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=smit");
assertThat(resourceIds, hasItems(id1, id2));
@ -1030,9 +1024,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// smit* - matches "smit" and "smith"
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?_elements=valueString&value-string:text=smit*");
assertThat(resourceIds, hasItems(id1, id2));
@ -1044,9 +1038,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// "smit"- matches "smit", but not "smith"
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=\"smit\"");
assertThat(resourceIds, contains(id2));
@ -1056,9 +1050,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
@Test
void stringTokensAreAnded() {
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Carl Smit"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=car%20smit");
assertThat(resourceIds, hasItems(id2));
@ -1072,9 +1066,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// | Fhir Query String | Executed Query | Matches | No Match
// | Smit | Smit* | John Smith | John Smi
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smi"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smi"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=Smit");
assertThat(resourceIds, hasItems(id1));
@ -1085,9 +1079,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// | Fhir Query String | Executed Query | Matches | No Match | Note
// | Jo Smit | Jo* Smit* | John Smith | John Frank | Multiple bare terms are `AND`
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "John Frank"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "John Frank"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=Jo%20Smit");
assertThat(resourceIds, hasItems(id1));
@ -1098,9 +1092,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// | Fhir Query String | Executed Query | Matches | No Match | Note
// | frank &vert; john | frank &vert; john | Frank Smith | Franklin Smith | SQS characters disable prefix wildcard
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Frank Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Frank Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Franklin Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Franklin Smith"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text=frank|john");
assertThat(resourceIds, hasItems(id1));
@ -1111,9 +1105,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
// | Fhir Query String | Executed Query | Matches | No Match | Note
// | 'frank' | 'frank' | Frank Smith | Franklin Smith | Quoted terms are exact match
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Frank Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Frank Smith"))).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "Franklin Smith"))).getIdPart();
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "Franklin Smith"))).getIdPart();
List<String> resourceIds = myTestDaoSearch.searchForIds("/Observation?value-string:text='frank'");
assertThat(resourceIds, hasItems(id1));
@ -1805,10 +1799,10 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
@Test
public void byValueString() {
String id1 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "a-string-value-1")
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "a-string-value-1")
)).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "a-string-value-2")
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "a-string-value-2")
)).getIdPart();
myCaptureQueriesListener.clear();
@ -1950,7 +1944,7 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
myTestDataBuilder.withObservationCode("http://example.com/", "the-code-1")
)).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withPrimitiveAttribute("valueString", "a-string-value-2")
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "a-string-value-2")
)).getIdPart();
myCaptureQueriesListener.clear();
@ -2065,13 +2059,13 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
myTestDataBuilder.withObservationCode("http://example.com/", "the-code-1"),
myTestDataBuilder.withEffectiveDate("2017-01-20T03:21:47"),
myTestDataBuilder.withTag("http://example.org", "aTag"),
myTestDataBuilder.withPrimitiveAttribute("valueString", "a-string-value-1")
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "a-string-value-1")
)).getIdPart();
String id2 = myTestDataBuilder.createObservation(List.of(
myTestDataBuilder.withObservationCode("http://example.com/", "the-code-2"),
myTestDataBuilder.withEffectiveDate("2017-01-24T03:21:47"),
myTestDataBuilder.withTag("http://example.org", "aTag"),
myTestDataBuilder.withPrimitiveAttribute("valueString", "a-string-value-2")
myTestDataBuilder.withResourcePrimitiveAttribute("valueString", "a-string-value-2")
)).getIdPart();
myCaptureQueriesListener.clear();

View File

@ -0,0 +1,143 @@
package ca.uhn.fhir.mdm.batch2.clear;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.util.StopWatch;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.PostgreSQL9Dialect;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.postgresql.Driver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD;
import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_HAPI_MDM_MANAGED;
import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS;
import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_MDM_MANAGED;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Disabled("Keeping as a sandbox to be used whenever we need a lot of MdmLinks in DB for performance testing")
@ContextConfiguration(classes = {MdmLinkSlowDeletionSandboxIT.TestDataSource.class})
public class MdmLinkSlowDeletionSandboxIT extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(MdmLinkSlowDeletionSandboxIT.class);
private final int ourMdmLinksToCreate = 1_000_000;
private final int ourLogMdmLinksEach = 1_000;
@Override
public void afterPurgeDatabase() {
// keep the generated data!
// super.afterPurgeDatabase();
}
@Disabled
@Test
void createMdmLinks() {
generatePatientsAndMdmLinks(ourMdmLinksToCreate);
long totalLinks = myMdmLinkDao.count();
ourLog.info("Total links in DB: {}", totalLinks);
assertTrue(totalLinks > 0);
}
private void generatePatientsAndMdmLinks(int theLinkCount) {
StopWatch sw = new StopWatch();
int totalMdmLinksCreated = 0;
for (int i = 0; i < theLinkCount; i++) {
List<JpaPid> patientIds = createMdmLinkPatients();
createMdmLink(patientIds.get(0), patientIds.get(1));
totalMdmLinksCreated++;
if (totalMdmLinksCreated % ourLogMdmLinksEach == 0) {
ourLog.info("Total MDM links created: {} in {} - ETA: {}", totalMdmLinksCreated, sw,
sw.getEstimatedTimeRemaining(totalMdmLinksCreated, ourMdmLinksToCreate));
}
}
}
private void createMdmLink(JpaPid thePidSource, JpaPid thePidTarget) {
MdmLink link = new MdmLink();
link.setGoldenResourcePersistenceId(thePidSource);
link.setSourcePersistenceId(thePidTarget);
Date now = new Date();
link.setCreated(now);
link.setUpdated(now);
link.setVersion("1");
link.setLinkSource(MdmLinkSourceEnum.MANUAL);
link.setMatchResult(MdmMatchResultEnum.MATCH);
link.setMdmSourceType("Patient");
link.setEidMatch(false);
link.setHadToCreateNewGoldenResource(true);
link.setRuleCount(6L);
link.setScore(.8);
link.setVector(61L);
runInTransaction(() -> myEntityManager.persist(link));
}
private List<JpaPid> createMdmLinkPatients() {
List<JpaPid> patientIds = new ArrayList<>();
for (int i = 0; i < 2; i++) {
Patient patient = new Patient();
patient.addName().setFamily(String.format("lastn-%07d", i)).addGiven(String.format("name-%07d", i));
if (i % 2 == 1) {
patient.getMeta()
.addTag(new Coding().setSystem(SYSTEM_MDM_MANAGED).setCode(CODE_HAPI_MDM_MANAGED));
} else {
patient.getMeta()
.addTag(new Coding().setSystem(SYSTEM_GOLDEN_RECORD_STATUS).setCode(CODE_GOLDEN_RECORD));
}
Long pId = myPatientDao.create(patient, new SystemRequestDetails()).getId().getIdPartAsLong();
JpaPid jpaPid = JpaPid.fromIdAndResourceType(pId, "Patient");
patientIds.add(jpaPid);
}
return patientIds;
}
@Configuration
public static class TestDataSource extends TestR4Config {
@Override
public String getHibernateDialect() {
return PostgreSQL9Dialect.class.getName();
// return Oracle12cDialect.class.getName();
}
@Override
public void setConnectionProperties(BasicDataSource theDataSource) {
theDataSource.setDriver(new Driver());
theDataSource.setUrl("jdbc:postgresql://localhost/mdm_link_perf");
theDataSource.setMaxWaitMillis(30000);
theDataSource.setUsername("cdr");
theDataSource.setPassword("smileCDR");
theDataSource.setMaxTotal(ourMaxThreads);
// theDataSource.setDriver(DriverTypeEnum.ORACLE_12C);
// theDataSource.setUrl("jdbc:oracle:thin:@localhost:1527/cdr.localdomain");
// theDataSource.setMaxWaitMillis(30000);
// theDataSource.setUsername("cdr");
// theDataSource.setPassword("smileCDR");
// theDataSource.setMaxTotal(ourMaxThreads);
}
}
}
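
The commented-out Oracle block above would not compile if uncommented: `BasicDataSource.setDriver` expects a `java.sql.Driver`, not HAPI's `DriverTypeEnum`. A hedged sketch of a working Oracle variant of `TestDataSource`, assuming the Oracle JDBC driver is on the classpath and treating the URL and credentials as placeholders:

import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.Oracle12cDialect;

// Illustrative only: swap these two overrides into TestDataSource to point the
// sandbox at Oracle instead of PostgreSQL.
@Override
public String getHibernateDialect() {
    return Oracle12cDialect.class.getName();
}

@Override
public void setConnectionProperties(BasicDataSource theDataSource) {
    theDataSource.setDriver(new oracle.jdbc.OracleDriver()); // assumes ojdbc is on the classpath
    theDataSource.setUrl("jdbc:oracle:thin:@localhost:1527/cdr.localdomain"); // placeholder URL
    theDataSource.setMaxWaitMillis(30000);
    theDataSource.setUsername("cdr"); // placeholder credentials
    theDataSource.setPassword("smileCDR");
    theDataSource.setMaxTotal(ourMaxThreads);
}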

View File

@ -34,16 +34,18 @@ import javax.persistence.ManyToOne;
import javax.persistence.MapsId;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import static ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable.SOURCE_URI_LENGTH;
@Table(name = "HFJ_RES_VER_PROV", indexes = {
@Index(name = "IDX_RESVERPROV_SOURCEURI", columnList = "SOURCE_URI"),
@Index(name = "IDX_RESVERPROV_REQUESTID", columnList = "REQUEST_ID"),
//@Index(name = "IDX_RESVERPROV_RESID", columnList = "RES_PID")
@Index(name = "IDX_RESVERPROV_RES_PID", columnList = "RES_PID")
})
@Entity
public class ResourceHistoryProvenanceEntity extends BasePartitionable {
public static final int SOURCE_URI_LENGTH = 100;
@Id
@Column(name = "RES_VER_PID")

View File

@ -25,28 +25,9 @@ import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.Columns;
import org.hibernate.annotations.OptimisticLock;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import javax.persistence.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
@ -61,6 +42,7 @@ import java.util.Collection;
})
public class ResourceHistoryTable extends BaseHasResource implements Serializable {
public static final String IDX_RESVER_ID_VER = "IDX_RESVER_ID_VER";
public static final int SOURCE_URI_LENGTH = 100;
/**
* @see ResourceEncodingEnum
*/
@ -94,13 +76,18 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@org.hibernate.annotations.Type(type = JpaConstants.ORG_HIBERNATE_TYPE_TEXT_TYPE)
@OptimisticLock(excluded = true)
private String myResourceTextVc;
@Column(name = "RES_ENCODING", nullable = false, length = ENCODING_COL_LENGTH)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
@OneToOne(mappedBy = "myResourceHistoryTable", cascade = {CascadeType.REMOVE})
private ResourceHistoryProvenanceEntity myProvenance;
// TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity
@Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true)
private String mySourceUri;
// TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity
@Column(name = "REQUEST_ID", length = Constants.REQUEST_ID_LENGTH, nullable = true)
private String myRequestId;
/**
* Constructor
@ -109,6 +96,22 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
super();
}
public String getSourceUri() {
return mySourceUri;
}
public void setSourceUri(String theSourceUri) {
mySourceUri = theSourceUri;
}
public String getRequestId() {
return myRequestId;
}
public void setRequestId(String theRequestId) {
myRequestId = theRequestId;
}
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
@ -215,6 +218,10 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
return myResourceVersion;
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
@Override
public boolean isDeleted() {
return getDeleted() != null;
@ -225,10 +232,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
setDeleted(null);
}
public void setVersion(long theVersion) {
myResourceVersion = theVersion;
}
@Override
public JpaPid getPersistentId() {
return JpaPid.fromId(myResourceId);
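
With SOURCE_URI and REQUEST_ID now stored inline on the history table, provenance can be read without touching HFJ_RES_VER_PROV. A minimal sketch (hypothetical helper, not part of this changeset) assuming a ResourceHistoryTable entity has already been loaded:

import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;

public class ProvenanceSketch {
    // Reads the inline provenance columns added in 6.8.0, avoiding the join
    // that ResourceHistoryProvenanceEntity would otherwise require.
    static String describeProvenance(ResourceHistoryTable theHistory) {
        String sourceUri = theHistory.getSourceUri(); // SOURCE_URI column
        String requestId = theHistory.getRequestId(); // REQUEST_ID column
        return sourceUri + "#" + requestId;
    }
}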

View File

@ -19,7 +19,6 @@
*/
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.apache.commons.lang3.builder.CompareToBuilder;

View File

@ -48,6 +48,7 @@ import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBi
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
import org.hibernate.tuple.ValueGenerator;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import javax.persistence.CascadeType;
import javax.persistence.Column;
@ -818,7 +819,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}
b.append("lastUpdated", getUpdated().getValueAsString());
if (getDeleted() != null) {
b.append("deleted");
b.append("deleted", new InstantType(getDeleted()).getValueAsString());
}
return b.build();
}
@ -883,7 +884,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}
private void populateId(IIdType retVal) {
if (myFhirId != null) {
if (myFhirId != null && !myFhirId.isEmpty()) {
retVal.setValue(getResourceType() + '/' + myFhirId + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
} else if (getTransientForcedId() != null) {
// Avoid a join query if possible

View File

@ -1,8 +1,15 @@
package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.*;
public class ResourceTableTest {
@ -16,5 +23,26 @@ public class ResourceTableTest {
}
}
@ParameterizedTest
@CsvSource(value={
"123, 123, Patient/123/_history/1",
", 123, Patient/123/_history/1",
"null, 456, Patient/456/_history/1"
},nullValues={"null"})
public void testPopulateId(String theFhirId, String theForcedId, String theExpected) {
// Given
ResourceTable t = new ResourceTable();
t.setFhirId(theFhirId);
ForcedId forcedId = new ForcedId();
forcedId.setForcedId(theForcedId);
t.setForcedId(forcedId);
t.setResourceType(new Patient().getResourceType().name());
t.setVersionForUnitTest(1);
// When
IdDt actual = t.getIdDt();
// Then
assertEquals(theExpected, actual.getValue());
}
}

View File

@ -27,30 +27,48 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.ExtensionDt;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.*;
import ca.uhn.fhir.util.DatatypeUtil;
import ca.uhn.fhir.util.ExtensionUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.PhoneticEncoderUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.dstu3.model.Extension;
import org.hl7.fhir.dstu3.model.SearchParameter;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;
import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.*;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.startsWith;
@Service
public class SearchParameterCanonicalizer {
private static final Logger ourLog = LoggerFactory.getLogger(SearchParameterCanonicalizer.class);
private final FhirContext myFhirContext;
private final FhirTerser myTerser;
@Autowired
public SearchParameterCanonicalizer(FhirContext theFhirContext) {
myFhirContext = theFhirContext;
myTerser = myFhirContext.newTerser();
}
private static Collection<String> toStrings(Collection<? extends IPrimitiveType<String>> theBase) {
@ -95,6 +113,14 @@ public class SearchParameterCanonicalizer {
String name = theNextSp.getCode();
String description = theNextSp.getDescription();
String path = theNextSp.getXpath();
Collection<String> baseResource = toStrings(Collections.singletonList(theNextSp.getBaseElement()));
List<String> baseCustomResources = extractDstu2CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE);
if (!baseCustomResources.isEmpty()) {
baseResource = Collections.singleton(baseCustomResources.get(0));
}
RestSearchParameterTypeEnum paramType = null;
RuntimeSearchParam.RuntimeSearchParamStatusEnum status = null;
if (theNextSp.getTypeElement().getValueAsEnum() != null) {
@ -138,8 +164,11 @@ public class SearchParameterCanonicalizer {
break;
}
}
Set<String> providesMembershipInCompartments = Collections.emptySet();
Set<String> targets = DatatypeUtil.toStringSet(theNextSp.getTarget());
Set<String> targetResources = DatatypeUtil.toStringSet(theNextSp.getTarget());
List<String> targetCustomResources = extractDstu2CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE);
maybeAddCustomResourcesToResources(targetResources, targetCustomResources);
if (isBlank(name) || isBlank(path)) {
if (paramType != RestSearchParameterTypeEnum.COMPOSITE) {
@ -164,14 +193,19 @@ public class SearchParameterCanonicalizer {
}
List<RuntimeSearchParam.Component> components = Collections.emptyList();
Collection<? extends IPrimitiveType<String>> base = Collections.singletonList(theNextSp.getBaseElement());
return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(base));
return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResource);
}
private RuntimeSearchParam canonicalizeSearchParameterDstu3(org.hl7.fhir.dstu3.model.SearchParameter theNextSp) {
String name = theNextSp.getCode();
String description = theNextSp.getDescription();
String path = theNextSp.getExpression();
List<String> baseResources = new ArrayList<>(toStrings(theNextSp.getBase()));
List<String> baseCustomResources = extractDstu3CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE);
maybeAddCustomResourcesToResources(baseResources, baseCustomResources);
RestSearchParameterTypeEnum paramType = null;
RuntimeSearchParam.RuntimeSearchParamStatusEnum status = null;
if (theNextSp.getType() != null) {
@ -222,8 +256,11 @@ public class SearchParameterCanonicalizer {
break;
}
}
Set<String> providesMembershipInCompartments = Collections.emptySet();
Set<String> targets = DatatypeUtil.toStringSet(theNextSp.getTarget());
Set<String> targetResources = DatatypeUtil.toStringSet(theNextSp.getTarget());
List<String> targetCustomResources = extractDstu3CustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE);
maybeAddCustomResourcesToResources(targetResources, targetCustomResources);
if (isBlank(name) || isBlank(path) || paramType == null) {
if (paramType != RestSearchParameterTypeEnum.COMPOSITE) {
@ -252,35 +289,23 @@ public class SearchParameterCanonicalizer {
components.add(new RuntimeSearchParam.Component(next.getExpression(), next.getDefinition().getReferenceElement().toUnqualifiedVersionless().getValue()));
}
return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, toStrings(theNextSp.getBase()));
return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResources);
}
private RuntimeSearchParam canonicalizeSearchParameterR4Plus(IBaseResource theNextSp) {
FhirTerser terser = myFhirContext.newTerser();
String name = terser.getSinglePrimitiveValueOrNull(theNextSp, "code");
String description = terser.getSinglePrimitiveValueOrNull(theNextSp, "description");
String path = terser.getSinglePrimitiveValueOrNull(theNextSp, "expression");
List<String> base = terser
.getValues(theNextSp, "base", IPrimitiveType.class)
.stream()
.map(IPrimitiveType::getValueAsString)
.collect(Collectors.toList());
if (theNextSp instanceof IBaseHasExtensions) {
((IBaseHasExtensions) theNextSp)
.getExtension()
.stream()
.filter(t -> HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE.equals(t.getUrl()))
.filter(t -> t.getValue() instanceof IPrimitiveType)
.map(t -> ((IPrimitiveType<?>) t.getValue()))
.map(IPrimitiveType::getValueAsString)
.filter(StringUtils::isNotBlank)
.forEach(base::add);
}
String name = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "code");
String description = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "description");
String path = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "expression");
Set<String> baseResources = extractR4PlusResources("base", theNextSp);
List<String> baseCustomResources = extractR4PlusCustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE);
maybeAddCustomResourcesToResources(baseResources, baseCustomResources);
RestSearchParameterTypeEnum paramType = null;
RuntimeSearchParam.RuntimeSearchParamStatusEnum status = null;
switch (terser.getSinglePrimitiveValue(theNextSp, "type").orElse("")) {
switch (myTerser.getSinglePrimitiveValue(theNextSp, "type").orElse("")) {
case "composite":
paramType = RestSearchParameterTypeEnum.COMPOSITE;
break;
@ -309,7 +334,7 @@ public class SearchParameterCanonicalizer {
paramType = RestSearchParameterTypeEnum.SPECIAL;
break;
}
switch (terser.getSinglePrimitiveValue(theNextSp, "status").orElse("")) {
switch (myTerser.getSinglePrimitiveValue(theNextSp, "status").orElse("")) {
case "active":
status = RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE;
break;
@ -323,24 +348,11 @@ public class SearchParameterCanonicalizer {
status = RuntimeSearchParam.RuntimeSearchParamStatusEnum.UNKNOWN;
break;
}
Set<String> providesMembershipInCompartments = Collections.emptySet();
Set<String> targets = terser
.getValues(theNextSp, "target", IPrimitiveType.class)
.stream()
.map(IPrimitiveType::getValueAsString)
.collect(Collectors.toSet());
if (theNextSp instanceof IBaseHasExtensions) {
((IBaseHasExtensions) theNextSp)
.getExtension()
.stream()
.filter(t -> HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE.equals(t.getUrl()))
.filter(t -> t.getValue() instanceof IPrimitiveType)
.map(t -> ((IPrimitiveType<?>) t.getValue()))
.map(IPrimitiveType::getValueAsString)
.filter(StringUtils::isNotBlank)
.forEach(targets::add);
}
Set<String> targetResources = extractR4PlusResources("target", theNextSp);
List<String> targetCustomResources = extractR4PlusCustomResourcesFromExtensions(theNextSp, HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE);
maybeAddCustomResourcesToResources(targetResources, targetCustomResources);
if (isBlank(name) || isBlank(path) || paramType == null) {
if ("_text".equals(name) || "_content".equals(name)) {
@ -351,7 +363,7 @@ public class SearchParameterCanonicalizer {
}
IIdType id = theNextSp.getIdElement();
String uri = terser.getSinglePrimitiveValueOrNull(theNextSp, "url");
String uri = myTerser.getSinglePrimitiveValueOrNull(theNextSp, "url");
ComboSearchParamType unique = null;
String value = ((IBaseHasExtensions) theNextSp).getExtension()
@ -369,9 +381,9 @@ public class SearchParameterCanonicalizer {
}
List<RuntimeSearchParam.Component> components = new ArrayList<>();
for (IBase next : terser.getValues(theNextSp, "component")) {
String expression = terser.getSinglePrimitiveValueOrNull(next, "expression");
String definition = terser.getSinglePrimitiveValueOrNull(next, "definition");
for (IBase next : myTerser.getValues(theNextSp, "component")) {
String expression = myTerser.getSinglePrimitiveValueOrNull(next, "expression");
String definition = myTerser.getSinglePrimitiveValueOrNull(next, "definition");
if (startsWith(definition, "/SearchParameter/")) {
definition = definition.substring(1);
}
@ -379,7 +391,15 @@ public class SearchParameterCanonicalizer {
components.add(new RuntimeSearchParam.Component(expression, definition));
}
return new RuntimeSearchParam(id, uri, name, description, path, paramType, providesMembershipInCompartments, targets, status, unique, components, base);
return new RuntimeSearchParam(id, uri, name, description, path, paramType, Collections.emptySet(), targetResources, status, unique, components, baseResources);
}
private Set<String> extractR4PlusResources(String thePath, IBaseResource theNextSp) {
return myTerser
.getValues(theNextSp, thePath, IPrimitiveType.class)
.stream()
.map(IPrimitiveType::getValueAsString)
.collect(Collectors.toSet());
}
/**
@ -427,5 +447,62 @@ public class SearchParameterCanonicalizer {
}
}
private List<String> extractDstu2CustomResourcesFromExtensions(ca.uhn.fhir.model.dstu2.resource.SearchParameter theSearchParameter, String theExtensionUrl) {
List<ExtensionDt> customSpExtensionDt = theSearchParameter.getUndeclaredExtensionsByUrl(theExtensionUrl);
return customSpExtensionDt.stream()
.map(theExtensionDt -> theExtensionDt.getValueAsPrimitive().getValueAsString())
.filter(StringUtils::isNotBlank)
.collect(Collectors.toList());
}
private List<String> extractDstu3CustomResourcesFromExtensions(org.hl7.fhir.dstu3.model.SearchParameter theSearchParameter, String theExtensionUrl) {
List<Extension> customSpExtensions = theSearchParameter.getExtensionsByUrl(theExtensionUrl);
return customSpExtensions.stream()
.map(theExtension -> theExtension.getValueAsPrimitive().getValueAsString())
.filter(StringUtils::isNotBlank)
.collect(Collectors.toList());
}
private List<String> extractR4PlusCustomResourcesFromExtensions(IBaseResource theSearchParameter, String theExtensionUrl) {
List<String> retVal = new ArrayList<>();
if (theSearchParameter instanceof IBaseHasExtensions) {
((IBaseHasExtensions) theSearchParameter)
.getExtension()
.stream()
.filter(t -> theExtensionUrl.equals(t.getUrl()))
.filter(t -> t.getValue() instanceof IPrimitiveType)
.map(t -> ((IPrimitiveType<?>) t.getValue()))
.map(IPrimitiveType::getValueAsString)
.filter(StringUtils::isNotBlank)
.forEach(retVal::add);
}
return retVal;
}
private <T extends Collection<String>> void maybeAddCustomResourcesToResources(T theResources, List<String> theCustomResources) {
// SearchParameter base and target components require strict binding to ResourceType for dstu[2|3], R4, R4B
// and to Version Independent Resource Types for R5.
//
// To handle custom resources, we set a placeholder of type 'Resource' in the base or target component and define
// the custom resource by adding a corresponding extension with url HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE
// or HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE with the name of the custom resource.
//
// To provide a base/target list that contains both the resources and customResources, we need to remove the placeholders
// from theResources and add theCustomResources.
if (!theCustomResources.isEmpty()) {
theResources.removeAll(Collections.singleton("Resource"));
theResources.addAll(theCustomResources);
}
}
}
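
To see the placeholder mechanism from the producer side: a minimal R4 sketch of a SearchParameter whose base and target are custom resource types (the Meal and Chef names are illustrative, mirroring the test fixtures below):

import ca.uhn.fhir.util.HapiExtensions;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;

public class CustomSearchParamSketch {
    static SearchParameter mealChefSearchParam() {
        SearchParameter sp = new SearchParameter();
        sp.setUrl("http://example.org/SearchParameter/meal-chef");
        sp.setCode("chef");
        sp.setType(Enumerations.SearchParamType.REFERENCE);
        sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
        sp.setExpression("Meal.chef | Observation.subject");
        // "Resource" is the strictly-bound placeholder; the extension names the real custom type.
        sp.addBase("Resource");
        sp.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new StringType("Meal"));
        sp.addTarget("Resource");
        sp.addExtension(HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new StringType("Chef"));
        return sp;
    }
}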

View File

@ -1,63 +0,0 @@
package ca.uhn.fhir.jpa.searchparam;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorDstu3;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.StopWatch;
import org.hl7.fhir.dstu3.model.Patient;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class IndexStressTest {
private static final Logger ourLog = LoggerFactory.getLogger(IndexStressTest.class);
@Test
public void testExtractSearchParams() {
Patient p = new Patient();
p.addName().setFamily("FOO").addGiven("BAR").addGiven("BAR");
p.getMaritalStatus().setText("DDDDD");
p.addAddress().addLine("A").addLine("B").addLine("C");
FhirContext ctx = FhirContext.forDstu3();
IValidationSupport mockValidationSupport = mock(IValidationSupport.class);
when(mockValidationSupport.getFhirContext()).thenReturn(ctx);
ISearchParamRegistry searchParamRegistry = mock(ISearchParamRegistry.class);
SearchParamExtractorDstu3 extractor = new SearchParamExtractorDstu3(new StorageSettings(), new PartitionSettings(), ctx, searchParamRegistry);
extractor.start();
ResourceSearchParams resourceSearchParams = new ResourceSearchParams("Patient");
ctx.getResourceDefinition("Patient")
.getSearchParams()
.forEach(t -> resourceSearchParams.put(t.getName(), t));
when(searchParamRegistry.getActiveSearchParams(eq("Patient"))).thenReturn(resourceSearchParams);
Set<ResourceIndexedSearchParamString> params = extractor.extractSearchParamStrings(p);
StopWatch sw = new StopWatch();
int loops = 100;
for (int i = 0; i < loops; i++) {
params = extractor.extractSearchParamStrings(p);
}
ourLog.info("Indexed {} times in {}ms/time", loops, sw.getMillisPerOperation(loops));
assertEquals(9, params.size());
verify(mockValidationSupport, times(1)).fetchAllStructureDefinitions();
}
}

View File

@ -2,11 +2,19 @@ package ca.uhn.fhir.jpa.searchparam.registry;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.model.api.ExtensionDt;
import ca.uhn.fhir.model.dstu2.valueset.ConformanceResourceStatusEnum;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
import ca.uhn.fhir.model.dstu2.valueset.SearchParamTypeEnum;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.BaseResource;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.StringType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
@ -14,15 +22,104 @@ import org.mockito.junit.jupiter.MockitoExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE;
import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ExtendWith(MockitoExtension.class)
public class SearchParameterCanonicalizerTest {
private static final Logger ourLog = LoggerFactory.getLogger(SearchParameterCanonicalizerTest.class);
ca.uhn.fhir.model.dstu2.resource.SearchParameter initSearchParamDstu2(){
ca.uhn.fhir.model.dstu2.resource.SearchParameter sp = new ca.uhn.fhir.model.dstu2.resource.SearchParameter();
sp.setId("SearchParameter/meal-chef");
sp.setUrl("http://example.org/SearchParameter/meal-chef");
sp.setBase(ResourceTypeEnum.RESOURCE);
sp.setCode("chef");
sp.setType(SearchParamTypeEnum.REFERENCE);
sp.setStatus(ConformanceResourceStatusEnum.ACTIVE);
sp.setXpath("Meal.chef | Observation.subject");
sp.addTarget(ResourceTypeEnum.RESOURCE);
sp.addTarget(ResourceTypeEnum.OBSERVATION);
sp.addUndeclaredExtension(new ExtensionDt(false, EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new StringDt("Meal")));
sp.addUndeclaredExtension(new ExtensionDt(false, EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new StringDt("Chef")));
return sp;
}
org.hl7.fhir.dstu3.model.SearchParameter initSearchParamDstu3(){
org.hl7.fhir.dstu3.model.SearchParameter sp = new org.hl7.fhir.dstu3.model.SearchParameter();
sp.setId("SearchParameter/meal-chef");
sp.setUrl("http://example.org/SearchParameter/meal-chef");
sp.addBase("Resource");
sp.addBase("Patient");
sp.setCode("chef");
sp.setType(org.hl7.fhir.dstu3.model.Enumerations.SearchParamType.REFERENCE);
sp.setStatus(org.hl7.fhir.dstu3.model.Enumerations.PublicationStatus.ACTIVE);
sp.setExpression("Meal.chef | Observation.subject");
sp.addTarget("Resource");
sp.addTarget("Observation");
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new org.hl7.fhir.dstu3.model.StringType("Meal"));
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new org.hl7.fhir.dstu3.model.StringType("Chef"));
return sp;
}
IBaseResource initSearchParamR4(){
SearchParameter sp = new SearchParameter();
sp.setId("SearchParameter/meal-chef");
sp.setUrl("http://example.org/SearchParameter/meal-chef");
sp.addBase("Resource");
sp.addBase("Patient");
sp.setCode("chef");
sp.setType(Enumerations.SearchParamType.REFERENCE);
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.setExpression("Meal.chef | Observation.subject");
sp.addTarget("Resource");
sp.addTarget("Observation");
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new StringType("Meal"));
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new StringType("Chef"));
return sp;
}
IBaseResource initSearchParamR4B(){
org.hl7.fhir.r4b.model.SearchParameter sp = new org.hl7.fhir.r4b.model.SearchParameter();
sp.setId("SearchParameter/meal-chef");
sp.setUrl("http://example.org/SearchParameter/meal-chef");
sp.addBase("Resource");
sp.addBase("Patient");
sp.setCode("chef");
sp.setType(org.hl7.fhir.r4b.model.Enumerations.SearchParamType.REFERENCE);
sp.setStatus(org.hl7.fhir.r4b.model.Enumerations.PublicationStatus.ACTIVE);
sp.setExpression("Meal.chef | Observation.subject");
sp.addTarget("Resource");
sp.addTarget("Observation");
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new org.hl7.fhir.r4b.model.StringType("Meal"));
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new org.hl7.fhir.r4b.model.StringType("Chef"));
return sp;
}
IBaseResource initSearchParamR5(){
org.hl7.fhir.r5.model.SearchParameter sp = new org.hl7.fhir.r5.model.SearchParameter();
sp.setId("SearchParameter/meal-chef");
sp.setUrl("http://example.org/SearchParameter/meal-chef");
sp.addBase(org.hl7.fhir.r5.model.Enumerations.VersionIndependentResourceTypesAll.RESOURCE);
sp.addBase(org.hl7.fhir.r5.model.Enumerations.VersionIndependentResourceTypesAll.PATIENT);
sp.setCode("chef");
sp.setType(org.hl7.fhir.r5.model.Enumerations.SearchParamType.REFERENCE);
sp.setStatus(org.hl7.fhir.r5.model.Enumerations.PublicationStatus.ACTIVE);
sp.setExpression("Meal.chef | Observation.subject");
sp.addTarget(org.hl7.fhir.r5.model.Enumerations.VersionIndependentResourceTypesAll.RESOURCE);
sp.addTarget(org.hl7.fhir.r5.model.Enumerations.VersionIndependentResourceTypesAll.OBSERVATION);
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_BASE_RESOURCE, new org.hl7.fhir.r5.model.StringType("Meal"));
sp.addExtension(EXTENSION_SEARCHPARAM_CUSTOM_TARGET_RESOURCE, new org.hl7.fhir.r5.model.StringType("Chef"));
return sp;
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void testCanonicalizeSearchParameterWithCustomType(boolean theConvertToR5) {
@ -37,7 +134,6 @@ public class SearchParameterCanonicalizerTest {
sp.setExpression("Meal.chef | Observation.subject");
sp.addTarget("Chef");
sp.addTarget("Observation");
IBaseResource searchParamToCanonicalize = sp;
SearchParameterCanonicalizer svc;
if (theConvertToR5) {
@ -57,7 +153,51 @@ public class SearchParameterCanonicalizerTest {
assertThat(output.getPathsSplit(), containsInAnyOrder("Meal.chef", "Observation.subject"));
assertThat(output.getBase(), containsInAnyOrder("Meal", "Patient"));
assertThat(output.getTargets(), contains("Chef", "Observation"));
}
@ParameterizedTest
@ValueSource(strings = {"Dstu2", "Dstu3", "R4", "R4B", "R5"})
public void testCanonicalizeSearchParameterWithCustomTypeAllVersion(String version) {
SearchParameterCanonicalizer svc;
IBaseResource searchParamToCanonicalize;
switch (version){
case "Dstu2":
searchParamToCanonicalize = initSearchParamDstu2();
svc = new SearchParameterCanonicalizer(FhirContext.forDstu2Cached());
break;
case "Dstu3":
searchParamToCanonicalize = initSearchParamDstu3();
svc = new SearchParameterCanonicalizer(FhirContext.forDstu3Cached());
break;
case "R4":
searchParamToCanonicalize = initSearchParamR4();
svc = new SearchParameterCanonicalizer(FhirContext.forR4Cached());
break;
case "R4B":
searchParamToCanonicalize = initSearchParamR4B();
svc = new SearchParameterCanonicalizer(FhirContext.forR4BCached());
break;
default:
searchParamToCanonicalize = initSearchParamR5();
svc = new SearchParameterCanonicalizer(FhirContext.forR5Cached());
break;
}
RuntimeSearchParam output = svc.canonicalizeSearchParameter(searchParamToCanonicalize);
assertEquals("chef", output.getName());
assertEquals(RestSearchParameterTypeEnum.REFERENCE, output.getParamType());
assertEquals(RuntimeSearchParam.RuntimeSearchParamStatusEnum.ACTIVE, output.getStatus());
assertThat(output.getPathsSplit(), containsInAnyOrder("Meal.chef", "Observation.subject"));
// DSTU2 Resources must only have 1 base
if ("Dstu2".equals(version)){
assertThat(output.getBase(), containsInAnyOrder("Meal"));
} else {
assertThat(output.getBase(), containsInAnyOrder("Meal", "Patient"));
}
assertThat(output.getTargets(), containsInAnyOrder("Chef", "Observation"));
assertThat(output.getBase(), not(contains("Resource")));
assertThat(output.getTargets(), not(contains("Resource")));
}
}

View File

@ -24,17 +24,21 @@ import ca.uhn.fhir.jpa.subscription.match.deliver.email.SubscriptionDeliveringEm
import ca.uhn.fhir.jpa.subscription.match.deliver.message.SubscriptionDeliveringMessageSubscriber;
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscriptionChannelType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.messaging.MessageHandler;
import java.util.Optional;
public class SubscriptionDeliveryHandlerFactory {
protected ApplicationContext myApplicationContext;
private IEmailSender myEmailSender;
@Autowired
private ApplicationContext myApplicationContext;
public SubscriptionDeliveryHandlerFactory(ApplicationContext theApplicationContext, IEmailSender theEmailSender) {
myApplicationContext = theApplicationContext;
myEmailSender = theEmailSender;
}
protected SubscriptionDeliveringEmailSubscriber newSubscriptionDeliveringEmailSubscriber(IEmailSender theEmailSender) {
return myApplicationContext.getBean(SubscriptionDeliveringEmailSubscriber.class, theEmailSender);
@ -60,7 +64,4 @@ public class SubscriptionDeliveryHandlerFactory {
}
}
public void setEmailSender(IEmailSender theEmailSender) {
myEmailSender = theEmailSender;
}
}

View File

@ -41,6 +41,7 @@ import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionRegiste
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.config.SubscriptionModelConfig;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
@ -94,8 +95,8 @@ public class SubscriptionProcessorConfig {
}
@Bean
public SubscriptionDeliveryHandlerFactory subscriptionDeliveryHandlerFactory() {
return new SubscriptionDeliveryHandlerFactory();
public SubscriptionDeliveryHandlerFactory subscriptionDeliveryHandlerFactory(ApplicationContext theApplicationContext, IEmailSender theEmailSender) {
return new SubscriptionDeliveryHandlerFactory(theApplicationContext, theEmailSender);
}
@Bean

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.subscription.match.deliver.BaseSubscriptionDeliverySubscr
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.model.ResourceDeliveryMessage;
import ca.uhn.fhir.rest.api.EncodingEnum;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -103,4 +104,9 @@ public class SubscriptionDeliveringEmailSubscriber extends BaseSubscriptionDeliv
public void setEmailSender(IEmailSender theEmailSender) {
myEmailSender = theEmailSender;
}
@VisibleForTesting
public IEmailSender getEmailSender(){
return myEmailSender;
}
}

View File

@ -0,0 +1,106 @@
/*-
* #%L
* HAPI FHIR Subscription Server
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.subscription.match.matcher.subscriber;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.server.messaging.BaseResourceModifiedMessage;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class SubscriptionDeliveryRequest {
// One of these two will be populated
private final IBaseResource myPayload;
private final IIdType myPayloadId;
private final ActiveSubscription myActiveSubscription;
private final RestOperationTypeEnum myRestOperationType;
private final RequestPartitionId myRequestPartitionId;
private final String myTransactionId;
public SubscriptionDeliveryRequest(@Nonnull IBaseBundle theBundlePayload, @Nonnull ActiveSubscription theActiveSubscription, @Nonnull RestOperationTypeEnum theOperationType, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theTransactionId) {
myPayload = theBundlePayload;
myPayloadId = null;
myActiveSubscription = theActiveSubscription;
myRestOperationType = theOperationType;
myRequestPartitionId = theRequestPartitionId;
myTransactionId = theTransactionId;
}
public SubscriptionDeliveryRequest(@Nonnull IBaseResource thePayload, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription) {
myPayload = thePayload;
myPayloadId = null;
myActiveSubscription = theActiveSubscription;
myRestOperationType = theMsg.getOperationType().asRestOperationType();
myRequestPartitionId = theMsg.getPartitionId();
myTransactionId = theMsg.getTransactionId();
}
public SubscriptionDeliveryRequest(@Nonnull IIdType thePayloadId, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription) {
myPayload = null;
myPayloadId = thePayloadId;
myActiveSubscription = theActiveSubscription;
myRestOperationType = theMsg.getOperationType().asRestOperationType();
myRequestPartitionId = theMsg.getPartitionId();
myTransactionId = theMsg.getTransactionId();
}
public IBaseResource getPayload() {
return myPayload;
}
public ActiveSubscription getActiveSubscription() {
return myActiveSubscription;
}
public RestOperationTypeEnum getRestOperationType() {
return myRestOperationType;
}
public BaseResourceModifiedMessage.OperationTypeEnum getOperationType() {
return BaseResourceModifiedMessage.OperationTypeEnum.from(myRestOperationType);
}
public RequestPartitionId getRequestPartitionId() {
return myRequestPartitionId;
}
public String getTransactionId() {
return myTransactionId;
}
public CanonicalSubscription getSubscription() {
return myActiveSubscription.getSubscription();
}
public IIdType getPayloadId() {
return myPayloadId;
}
public boolean hasPayload() {
return myPayload != null;
}
}

View File

@ -37,6 +37,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.messaging.MessageChannel;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
public class SubscriptionMatchDeliverer {
@ -51,11 +54,46 @@ public class SubscriptionMatchDeliverer {
mySubscriptionChannelRegistry = theSubscriptionChannelRegistry;
}
public boolean deliverPayload(IBaseResource thePayload, ResourceModifiedMessage theMsg, ActiveSubscription theActiveSubscription, InMemoryMatchResult matchResult) {
EncodingEnum encoding = null;
public boolean deliverPayload(@Nullable IBaseResource thePayload, @Nonnull ResourceModifiedMessage theMsg, @Nonnull ActiveSubscription theActiveSubscription, @Nullable InMemoryMatchResult theInMemoryMatchResult) {
SubscriptionDeliveryRequest subscriptionDeliveryRequest;
if (thePayload != null) {
subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(thePayload, theMsg, theActiveSubscription);
} else {
subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(theMsg.getPayloadId(myFhirContext), theMsg, theActiveSubscription);
}
ResourceDeliveryMessage deliveryMsg = buildResourceDeliveryMessage(subscriptionDeliveryRequest);
deliveryMsg.copyAdditionalPropertiesFrom(theMsg);
CanonicalSubscription subscription = theActiveSubscription.getSubscription();
String subscriptionId = theActiveSubscription.getId();
return sendToDeliveryChannel(theActiveSubscription, theInMemoryMatchResult, deliveryMsg);
}
public boolean deliverPayload(@Nonnull SubscriptionDeliveryRequest subscriptionDeliveryRequest, @Nullable InMemoryMatchResult theInMemoryMatchResult) {
ResourceDeliveryMessage deliveryMsg = buildResourceDeliveryMessage(subscriptionDeliveryRequest);
return sendToDeliveryChannel(subscriptionDeliveryRequest.getActiveSubscription(), theInMemoryMatchResult, deliveryMsg);
}
private boolean sendToDeliveryChannel(@Nonnull ActiveSubscription theActiveSubscription, @Nullable InMemoryMatchResult theInMemoryMatchResult, @Nonnull ResourceDeliveryMessage deliveryMsg) {
if (!callHooks(theActiveSubscription, theInMemoryMatchResult, deliveryMsg)) {
return false;
}
boolean retVal = false;
ResourceDeliveryJsonMessage wrappedMsg = new ResourceDeliveryJsonMessage(deliveryMsg);
MessageChannel deliveryChannel = mySubscriptionChannelRegistry.getDeliverySenderChannel(theActiveSubscription.getChannelName());
if (deliveryChannel != null) {
retVal = true;
trySendToDeliveryChannel(wrappedMsg, deliveryChannel);
} else {
ourLog.warn("Do not have delivery channel for subscription {}", theActiveSubscription.getId());
}
return retVal;
}
private ResourceDeliveryMessage buildResourceDeliveryMessage(@Nonnull SubscriptionDeliveryRequest theRequest) {
EncodingEnum encoding = null;
CanonicalSubscription subscription = theRequest.getSubscription();
if (subscription != null && subscription.getPayloadString() != null && !subscription.getPayloadString().isEmpty()) {
encoding = EncodingEnum.forContentType(subscription.getPayloadString());
@ -63,42 +101,30 @@ public class SubscriptionMatchDeliverer {
encoding = defaultIfNull(encoding, EncodingEnum.JSON);
ResourceDeliveryMessage deliveryMsg = new ResourceDeliveryMessage();
deliveryMsg.setPartitionId(theMsg.getPartitionId());
deliveryMsg.setPartitionId(theRequest.getRequestPartitionId());
if (thePayload != null) {
deliveryMsg.setPayload(myFhirContext, thePayload, encoding);
if (theRequest.hasPayload()) {
deliveryMsg.setPayload(myFhirContext, theRequest.getPayload(), encoding);
} else {
deliveryMsg.setPayloadId(theMsg.getPayloadId(myFhirContext));
deliveryMsg.setPayloadId(theRequest.getPayloadId());
}
deliveryMsg.setSubscription(subscription);
deliveryMsg.setOperationType(theMsg.getOperationType());
deliveryMsg.setTransactionId(theMsg.getTransactionId());
deliveryMsg.copyAdditionalPropertiesFrom(theMsg);
deliveryMsg.setOperationType(theRequest.getOperationType());
deliveryMsg.setTransactionId(theRequest.getTransactionId());
return deliveryMsg;
}
private boolean callHooks(ActiveSubscription theActiveSubscription, InMemoryMatchResult theInMemoryMatchResult, ResourceDeliveryMessage deliveryMsg) {
// Interceptor call: SUBSCRIPTION_RESOURCE_MATCHED
HookParams params = new HookParams()
.add(CanonicalSubscription.class, theActiveSubscription.getSubscription())
.add(ResourceDeliveryMessage.class, deliveryMsg)
.add(InMemoryMatchResult.class, matchResult);
.add(InMemoryMatchResult.class, theInMemoryMatchResult);
if (!myInterceptorBroadcaster.callHooks(Pointcut.SUBSCRIPTION_RESOURCE_MATCHED, params)) {
ourLog.info("Interceptor has decided to abort processing of subscription {}", subscriptionId);
ourLog.info("Interceptor has decided to abort processing of subscription {}", theActiveSubscription.getId());
return false;
}
return sendToDeliveryChannel(theActiveSubscription, deliveryMsg);
}
private boolean sendToDeliveryChannel(ActiveSubscription nextActiveSubscription, ResourceDeliveryMessage theDeliveryMsg) {
boolean retVal = false;
ResourceDeliveryJsonMessage wrappedMsg = new ResourceDeliveryJsonMessage(theDeliveryMsg);
MessageChannel deliveryChannel = mySubscriptionChannelRegistry.getDeliverySenderChannel(nextActiveSubscription.getChannelName());
if (deliveryChannel != null) {
retVal = true;
trySendToDeliveryChannel(wrappedMsg, deliveryChannel);
} else {
ourLog.warn("Do not have delivery channel for subscription {}", nextActiveSubscription.getId());
}
return retVal;
return true;
}
private void trySendToDeliveryChannel(ResourceDeliveryJsonMessage theWrappedMsg, MessageChannel theDeliveryChannel) {

View File

@ -31,7 +31,6 @@ public final class SubscriptionTopicCanonicalizer {
private SubscriptionTopicCanonicalizer() {
}
// WIP STR5 use elsewhere
public static SubscriptionTopic canonicalizeTopic(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) {
switch (theFhirContext.getVersion().getVersion()) {
case R4B:

View File

@ -20,20 +20,14 @@
package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelRegistry;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchDeliverer;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator;
import org.springframework.context.annotation.Bean;
public class SubscriptionTopicConfig {
@Bean
SubscriptionMatchDeliverer subscriptionMatchDeliverer(FhirContext theFhirContext, IInterceptorBroadcaster theInterceptorBroadcaster, SubscriptionChannelRegistry theSubscriptionChannelRegistry) {
return new SubscriptionMatchDeliverer(theFhirContext, theInterceptorBroadcaster, theSubscriptionChannelRegistry);
}
@Bean
SubscriptionTopicMatchingSubscriber subscriptionTopicMatchingSubscriber(FhirContext theFhirContext) {
return new SubscriptionTopicMatchingSubscriber(theFhirContext);
@ -68,4 +62,9 @@ public class SubscriptionTopicConfig {
SubscriptionTopicValidatingInterceptor subscriptionTopicValidatingInterceptor(FhirContext theFhirContext, SubscriptionQueryValidator theSubscriptionQueryValidator) {
return new SubscriptionTopicValidatingInterceptor(theFhirContext, theSubscriptionQueryValidator);
}
@Bean
SubscriptionTopicDispatcher subscriptionTopicDispatcher(SubscriptionRegistry theSubscriptionRegistry, SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) {
return new SubscriptionTopicDispatcher(theSubscriptionRegistry, theSubscriptionMatchDeliverer, theSubscriptionTopicPayloadBuilder);
}
}

View File

@ -0,0 +1,90 @@
/*-
* #%L
* HAPI FHIR Subscription Server
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionDeliveryRequest;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchDeliverer;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.UUID;
/**
Subscription topic notifications are natively supported in R5 and R4B. They are also partially supported in R4
* via the subscription backport spec <a href="http://build.fhir.org/ig/HL7/fhir-subscription-backport-ig/components.html">Subscription Backport</a>.
* In all versions, it is possible for a FHIR Repository to submit topic subscription notifications triggered by some
* arbitrary "business event". In R5 and R4B most subscription topic notifications will be triggered by a SubscriptionTopic
* match. However, in the R4 backport, the SubscriptionTopic is not supported and the SubscriptionTopicDispatcher service
is provided to generate those notifications instead. Any custom Java extension to the FHIR repository can @Autowire this service to
* send topic notifications to all Subscription resources subscribed to that topic.
*/
public class SubscriptionTopicDispatcher {
private final SubscriptionRegistry mySubscriptionRegistry;
private final SubscriptionMatchDeliverer mySubscriptionMatchDeliverer;
private final SubscriptionTopicPayloadBuilder mySubscriptionTopicPayloadBuilder;
public SubscriptionTopicDispatcher(SubscriptionRegistry theSubscriptionRegistry, SubscriptionMatchDeliverer theSubscriptionMatchDeliverer, SubscriptionTopicPayloadBuilder theSubscriptionTopicPayloadBuilder) {
mySubscriptionRegistry = theSubscriptionRegistry;
mySubscriptionMatchDeliverer = theSubscriptionMatchDeliverer;
mySubscriptionTopicPayloadBuilder = theSubscriptionTopicPayloadBuilder;
}
/**
* Deliver a Subscription topic notification to all subscriptions for the given topic.
*
* @param theTopicUrl Deliver to subscriptions for this topic
* @param theResources The list of resources to deliver. The first resource will be the primary "focus" resource per the Subscription documentation.
* This list should _not_ include the SubscriptionStatus. The SubscriptionStatus will be added as the first element to
* the delivered bundle. The reason for this is that the SubscriptionStatus needs to reference the subscription ID, which is
* not known until the bundle is delivered.
@param theRequestType The type of REST operation (create, update, or delete) that triggered the notification; it determines the entry type used for each delivered resource
@param theInMemoryMatchResult Information about the match event that led to this dispatch that is sent to SUBSCRIPTION_RESOURCE_MATCHED
@param theRequestPartitionId The request partition(s) of the originating request, if any. This is used by subscriptions that need to perform repository
operations as a part of their delivery. Those repository operations will be performed on the supplied request partitions.
* @param theTransactionId The transaction ID of the request, if any. This is used for logging.
* @return The number of subscription notifications that were successfully queued for delivery
*/
public int dispatch(@Nonnull String theTopicUrl, @Nonnull List<IBaseResource> theResources, @Nonnull RestOperationTypeEnum theRequestType, @Nullable InMemoryMatchResult theInMemoryMatchResult, @Nullable RequestPartitionId theRequestPartitionId, @Nullable String theTransactionId) {
int count = 0;
List<ActiveSubscription> topicSubscriptions = mySubscriptionRegistry.getTopicSubscriptionsByTopic(theTopicUrl);
if (!topicSubscriptions.isEmpty()) {
for (ActiveSubscription activeSubscription : topicSubscriptions) {
// WIP STR5 apply subscription filters
IBaseBundle bundlePayload = mySubscriptionTopicPayloadBuilder.buildPayload(theResources, activeSubscription, theTopicUrl, theRequestType);
// WIP STR5 do we need to add a total? If so can do that with R5BundleFactory
bundlePayload.setId(UUID.randomUUID().toString());
SubscriptionDeliveryRequest subscriptionDeliveryRequest = new SubscriptionDeliveryRequest(bundlePayload, activeSubscription, theRequestType, theRequestPartitionId, theTransactionId);
boolean success = mySubscriptionMatchDeliverer.deliverPayload(subscriptionDeliveryRequest, theInMemoryMatchResult);
if (success) {
count++;
}
}
}
return count;
}
}
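For illustration, here is a minimal sketch of a custom service using this dispatcher to notify subscribers of an arbitrary business event. The class, method, and topic URL below are hypothetical; only SubscriptionTopicDispatcher and the dispatch() signature come from this change.
// Hypothetical usage sketch -- not part of this change.
package ca.uhn.fhir.example;

import ca.uhn.fhir.jpa.topic.SubscriptionTopicDispatcher;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Collections;

public class ExampleBusinessEventNotifier {
	@Autowired
	private SubscriptionTopicDispatcher mySubscriptionTopicDispatcher;

	/**
	 * Queues a topic notification for every Subscription subscribed to the
	 * (hypothetical) topic URL, returning the number queued for delivery.
	 */
	public int onBusinessEvent(IBaseResource theFocusResource) {
		return mySubscriptionTopicDispatcher.dispatch(
			"http://example.org/topics/business-event", // hypothetical topic URL
			Collections.singletonList(theFocusResource), // focus resource first, per the contract above
			RestOperationTypeEnum.UPDATE,
			null, // no in-memory match result for a manually triggered event
			null, // no specific request partition
			null); // no transaction id
	}
}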

View File

@ -25,12 +25,11 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchDeliverer;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
@ -41,8 +40,8 @@ import org.springframework.messaging.MessagingException;
import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
@ -60,6 +59,8 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
SubscriptionTopicPayloadBuilder mySubscriptionTopicPayloadBuilder;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired
private SubscriptionTopicDispatcher mySubscriptionTopicDispatcher;
public SubscriptionTopicMatchingSubscriber(FhirContext theFhirContext) {
myFhirContext = theFhirContext;
@ -97,24 +98,17 @@ public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
SubscriptionTopicMatcher matcher = new SubscriptionTopicMatcher(mySubscriptionTopicSupport, topic);
InMemoryMatchResult result = matcher.match(theMsg);
if (result.matched()) {
ourLog.info("Matched topic {} to message {}", topic.getUrl(), theMsg);
deliverToTopicSubscriptions(theMsg, topic, result);
int deliveries = deliverToTopicSubscriptions(theMsg, topic, result);
ourLog.info("Matched topic {} to message {}. Notifications sent to {} subscriptions for delivery.", topic.getUrl(), theMsg, deliveries);
}
}
}
private int deliverToTopicSubscriptions(ResourceModifiedMessage theMsg, SubscriptionTopic theSubscriptionTopic, InMemoryMatchResult theInMemoryMatchResult) {
String topicUrl = theSubscriptionTopic.getUrl();
List<IBaseResource> matchedResource = Collections.singletonList(theMsg.getNewPayload(myFhirContext));
RestOperationTypeEnum restOperationType = theMsg.getOperationType().asRestOperationType();
return mySubscriptionTopicDispatcher.dispatch(topicUrl, matchedResource, restOperationType, theInMemoryMatchResult, theMsg.getPartitionId(), theMsg.getTransactionId());
}
}

View File

@ -23,7 +23,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
import org.hl7.fhir.instance.model.api.IBaseBundle;
@ -32,8 +32,8 @@ import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.Reference;
import org.hl7.fhir.r5.model.SubscriptionStatus;
import java.util.List;
import java.util.UUID;
public class SubscriptionTopicPayloadBuilder {
@ -43,12 +43,12 @@ public class SubscriptionTopicPayloadBuilder {
myFhirContext = theFhirContext;
}
public IBaseBundle buildPayload(List<IBaseResource> theResources, ActiveSubscription theActiveSubscription, String theTopicUrl, RestOperationTypeEnum theRestOperationType) {
BundleBuilder bundleBuilder = new BundleBuilder(myFhirContext);
// WIP STR5 set eventsSinceSubscriptionStart from the database
int eventsSinceSubscriptionStart = 1;
IBaseResource subscriptionStatus = buildSubscriptionStatus(theResources, theActiveSubscription, theTopicUrl, eventsSinceSubscriptionStart);
FhirVersionEnum fhirVersion = myFhirContext.getVersion().getVersion();
@ -65,21 +65,25 @@ public class SubscriptionTopicPayloadBuilder {
// WIP STR5 is this the right type of entry? see http://hl7.org/fhir/subscriptionstatus-examples.html
// WIP STR5 Also see http://hl7.org/fhir/R4B/notification-full-resource.json.html need to conform to these
bundleBuilder.addCollectionEntry(subscriptionStatus);
for (IBaseResource resource : theResources) {
switch(theRestOperationType) {
case CREATE:
bundleBuilder.addTransactionCreateEntry(resource);
break;
case UPDATE:
bundleBuilder.addTransactionUpdateEntry(resource);
break;
case DELETE:
bundleBuilder.addTransactionDeleteEntry(resource);
break;
}
}
return bundleBuilder.getBundle();
}
private SubscriptionStatus buildSubscriptionStatus(List<IBaseResource> theResources, ActiveSubscription theActiveSubscription, String theTopicUrl, int theEventsSinceSubscriptionStart) {
SubscriptionStatus subscriptionStatus = new SubscriptionStatus();
subscriptionStatus.setId(UUID.randomUUID().toString());
subscriptionStatus.setStatus(Enumerations.SubscriptionStatusCodes.ACTIVE);
@ -87,9 +91,14 @@ public class SubscriptionTopicPayloadBuilder {
// WIP STR5 count events since subscription start and set eventsSinceSubscriptionStart
// store counts by subscription id
subscriptionStatus.setEventsSinceSubscriptionStart(theEventsSinceSubscriptionStart);
SubscriptionStatus.SubscriptionStatusNotificationEventComponent event = subscriptionStatus.addNotificationEvent();
event.setEventNumber(theEventsSinceSubscriptionStart);
if (theResources.size() > 0) {
event.setFocus(new Reference(theResources.get(0).getIdElement()));
}
subscriptionStatus.setSubscription(new Reference(theActiveSubscription.getSubscription().getIdElement(myFhirContext)));
subscriptionStatus.setTopic(theTopicUrl);
return subscriptionStatus;
}
}

View File

@ -24,14 +24,15 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
import ca.uhn.fhir.storage.PreviousVersionReader;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.Enumeration;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import java.util.List;
import java.util.Optional;
public class SubscriptionTriggerMatcher {
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
@ -42,6 +43,7 @@ public class SubscriptionTriggerMatcher {
private final String myResourceName;
private final IBaseResource myResource;
private final IFhirResourceDao myDao;
private final PreviousVersionReader myPreviousVersionReader;
private final SystemRequestDetails mySrd;
public SubscriptionTriggerMatcher(SubscriptionTopicSupport theSubscriptionTopicSupport, ResourceModifiedMessage theMsg, SubscriptionTopic.SubscriptionTopicResourceTriggerComponent theTrigger) {
@ -51,6 +53,7 @@ public class SubscriptionTriggerMatcher {
myResourceName = myResource.fhirType();
myDao = mySubscriptionTopicSupport.getDaoRegistry().getResourceDao(myResourceName);
myTrigger = theTrigger;
myPreviousVersionReader = new PreviousVersionReader(myDao);
mySrd = new SystemRequestDetails();
}
@ -83,12 +86,10 @@ public class SubscriptionTriggerMatcher {
if (previousCriteria != null) {
if (myOperation == ResourceModifiedMessage.OperationTypeEnum.UPDATE ||
myOperation == ResourceModifiedMessage.OperationTypeEnum.DELETE) {
Optional<IBaseResource> oPreviousVersion = myPreviousVersionReader.readPreviousVersion(myResource);
if (oPreviousVersion.isPresent()) {
previousMatches = matchResource(oPreviousVersion.get(), previousCriteria);
} else {
ourLog.warn("Resource {} has a version of 1, which should not be the case for a create or delete operation", myResource.getIdElement().toUnqualifiedVersionless());
}
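PreviousVersionReader itself is not shown in this diff. Below is a minimal sketch of the behavior implied by the replaced code above and by the stubbed read(any(), any(), eq(false)) call in SubscriptionTriggerMatcherTest, assuming it simply decrements the version on the resource id; the real ca.uhn.fhir.storage.PreviousVersionReader may differ.
// Hypothetical sketch -- the real PreviousVersionReader may differ.
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.Optional;

public class PreviousVersionReaderSketch {
	private final IFhirResourceDao<IBaseResource> myDao;

	public PreviousVersionReaderSketch(IFhirResourceDao<IBaseResource> theDao) {
		myDao = theDao;
	}

	public Optional<IBaseResource> readPreviousVersion(IBaseResource theResource) {
		Long currentVersion = theResource.getIdElement().getVersionIdPartAsLong();
		if (currentVersion == null || currentVersion <= 1) {
			// A version-1 (or unversioned) resource has no previous version
			return Optional.empty();
		}
		IIdType previousId = theResource.getIdElement().withVersion(Long.toString(currentVersion - 1));
		// theDeletedOk=false, matching the stub in SubscriptionTriggerMatcherTest
		return Optional.ofNullable(myDao.read(previousId, new SystemRequestDetails(), false));
	}
}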

View File

@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionQueryValidator;
import org.junit.jupiter.api.Test;
@ -85,6 +86,10 @@ public class DaoSubscriptionMatcherTest {
return mock(IRequestPartitionHelperSvc.class);
}
@Bean
public IEmailSender emailSender(){
return mock(IEmailSender.class);
}
}
}

View File

@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannelFactory;
import ca.uhn.fhir.jpa.subscription.channel.subscription.IChannelNamer;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.module.config.MockFhirClientSearchParamProvider;
import ca.uhn.fhir.jpa.subscription.util.SubscriptionDebugLogInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
@ -101,5 +102,10 @@ public abstract class BaseSubscriptionTest {
public IChannelNamer channelNamer() {
return (theNameComponent, theChannelSettings) -> theNameComponent;
}
@Bean
public IEmailSender emailSender(){
return mock(IEmailSender.class);
}
}
}

View File

@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.match.deliver.websocket.WebsocketConnectionValidator;
import ca.uhn.fhir.jpa.subscription.match.deliver.websocket.WebsocketValidationResponse;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
@ -140,6 +141,10 @@ public class WebsocketConnectionValidatorTest {
public IResourceChangeListenerRegistry resourceChangeListenerRegistry() {
return mock(IResourceChangeListenerRegistry.class, RETURNS_DEEP_STUBS);
}
@Bean
public IEmailSender emailSender(){
return mock(IEmailSender.class);
}
}
}

View File

@ -3,13 +3,11 @@ package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r4b.model.Bundle;
import org.hl7.fhir.r4b.model.Encounter;
import org.hl7.fhir.r4b.model.Resource;
import org.junit.jupiter.api.Test;
import java.util.List;
@ -17,6 +15,7 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
class SubscriptionTopicPayloadBuilderR4BTest {
private static final String TEST_TOPIC_URL = "test-builder-topic-url";
FhirContext ourFhirContext = FhirContext.forR4BCached();
@Test
public void testBuildPayloadDelete() {
@ -24,14 +23,11 @@ class SubscriptionTopicPayloadBuilderR4BTest {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.DELETE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);
@ -47,14 +43,11 @@ class SubscriptionTopicPayloadBuilderR4BTest {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.UPDATE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);
@ -71,14 +64,11 @@ class SubscriptionTopicPayloadBuilderR4BTest {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.CREATE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);

View File

@ -3,13 +3,11 @@ package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Encounter;
import org.hl7.fhir.r5.model.Resource;
import org.junit.jupiter.api.Test;
import java.util.List;
@ -17,6 +15,7 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
class SubscriptionTopicPayloadBuilderR5Test {
private static final String TEST_TOPIC_URL = "test-builder-topic-url";
FhirContext ourFhirContext = FhirContext.forR5Cached();
@Test
public void testBuildPayloadDelete() {
@ -24,14 +23,11 @@ class SubscriptionTopicPayloadBuilderR5Test {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.DELETE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);
@ -47,14 +43,11 @@ class SubscriptionTopicPayloadBuilderR5Test {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.UPDATE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);
@ -71,14 +64,11 @@ class SubscriptionTopicPayloadBuilderR5Test {
var svc = new SubscriptionTopicPayloadBuilder(ourFhirContext);
var encounter = new Encounter();
encounter.setId("Encounter/1");
CanonicalSubscription sub = new CanonicalSubscription();
ActiveSubscription subscription = new ActiveSubscription(sub, "test");
// run
Bundle payload = (Bundle)svc.buildPayload(List.of(encounter), subscription, TEST_TOPIC_URL, RestOperationTypeEnum.CREATE);
// verify
List<Resource> resources = BundleUtil.toListOfResourcesOfType(ourFhirContext, payload, Resource.class);

View File

@ -18,6 +18,7 @@ import org.mockito.junit.jupiter.MockitoExtension;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -119,7 +120,7 @@ class SubscriptionTriggerMatcherTest {
IFhirResourceDao mockEncounterDao = mock(IFhirResourceDao.class);
when(myDaoRegistry.getResourceDao("Encounter")).thenReturn(mockEncounterDao);
Encounter encounterPreviousVersion = new Encounter();
when(mockEncounterDao.read(any(), any(), eq(false))).thenReturn(encounterPreviousVersion);
when(mySearchParamMatcher.match(any(), any(), any())).thenReturn(InMemoryMatchResult.successfulMatch());
// run

View File

@ -151,6 +151,7 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
request.setPageStart(index);
request.setBatchSize(size);
request.setSort(Sort.unsorted());
request.setJobStatus("");
Page<JobInstance> page;
Iterator<JobInstance> iterator;
@ -219,6 +220,15 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
// Since there was only one chunk, the job should proceed without requiring a maintenance pass
myBatch2JobHelper.awaitJobCompletion(batchJobId);
myLastStepLatch.awaitExpected();
final List<JobInstance> jobInstances = myJobPersistence.fetchInstances(10, 0);
assertEquals(1, jobInstances.size());
final JobInstance jobInstance = jobInstances.get(0);
assertEquals(StatusEnum.COMPLETED, jobInstance.getStatus());
assertEquals(1.0, jobInstance.getProgress());
}
private void createThreeStepReductionJob(
@ -360,6 +370,15 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
testInfo + i
));
}
final List<JobInstance> jobInstances = myJobPersistence.fetchInstances(10, 0);
assertEquals(1, jobInstances.size());
final JobInstance jobInstance = jobInstances.get(0);
assertEquals(StatusEnum.COMPLETED, jobInstance.getStatus());
assertEquals(1.0, jobInstance.getProgress());
}
@Test

View File

@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.batch2;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.model.BulkExportParameters;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
@ -33,6 +34,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
@ -44,6 +46,7 @@ import java.util.concurrent.TimeUnit;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -181,7 +184,8 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
private void verifyBulkExportResults(String theInstanceId, List<String> theContainedList, List<String> theExcludedList) {
// Iterate over the files
Batch2JobInfo jobInfo = myJobRunner.getJobInfo(theInstanceId);
String report = jobInfo.getReport();
ourLog.debug("Export job {} report: {}", theInstanceId, report);
if (!theContainedList.isEmpty()) {
assertThat("report for instance " + theInstanceId + " is empty", report, not(emptyOrNullString()));
@ -227,6 +231,10 @@ public class BulkDataErrorAbuseTest extends BaseResourceProviderR4Test {
for (String excludedString : theExcludedList) {
assertThat("export doesn't have expected ids", foundIds, not(hasItem(excludedString)));
}
assertThat(jobInfo.getCombinedRecordsProcessed(), equalTo(2));
ourLog.info("Job {} ok", theInstanceId);
}
private String startJob(BulkDataExportOptions theOptions) {

View File

@ -10,6 +10,7 @@ import ca.uhn.fhir.batch2.model.WorkChunkCompletionEvent;
import ca.uhn.fhir.batch2.model.WorkChunkCreateEvent;
import ca.uhn.fhir.batch2.model.WorkChunkErrorEvent;
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
@ -27,7 +28,9 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.transaction.PlatformTransactionManager;
import javax.annotation.Nonnull;
@ -45,6 +48,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@ -267,6 +271,48 @@ public class JpaJobPersistenceImplTest extends BaseJpaR4Test {
assertEquals(instanceId, foundInstances.get(0).getInstanceId());
}
@Test
void testFetchInstancesWithEmptyStatus() {
createTwoJobsDifferentStatus();
JobInstanceFetchRequest request = createFetchRequest();
// Test
request.setJobStatus("");
Page<JobInstance> foundInstances = mySvc.fetchJobInstances(request);
assertThat(foundInstances.getTotalElements(), equalTo(2L));
}
@Test
void testFetchInstanceByStatus() {
createTwoJobsDifferentStatus();
JobInstanceFetchRequest request = createFetchRequest();
// Test
request.setJobStatus("COMPLETED");
Page<JobInstance> foundInstances = mySvc.fetchJobInstances(request);
assertThat(foundInstances.getTotalElements(), equalTo(1L));
}
private JobInstanceFetchRequest createFetchRequest() {
JobInstanceFetchRequest request = new JobInstanceFetchRequest();
request.setPageStart(0);
request.setBatchSize(1);
request.setSort(Sort.by(Sort.Direction.DESC, "myCreateTime"));
return request;
}
private void createTwoJobsDifferentStatus() {
JobInstance instance = new JobInstance();
instance.setStatus(StatusEnum.QUEUED);
instance.setJobDefinitionId(JOB_DEFINITION_ID);
JobInstance instance2 = new JobInstance();
instance2.setStatus(StatusEnum.COMPLETED);
instance2.setJobDefinitionId(JOB_DEFINITION_ID + "-2");
mySvc.storeNewInstance(instance);
mySvc.storeNewInstance(instance2);
}
/**
* Test bodies are defined in {@link AbstractIJobPersistenceSpecificationTest}.
* The nested test suite runs those tests here in a JPA context.

View File

@ -10,7 +10,7 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.binary.api.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -39,6 +39,7 @@ import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -66,8 +67,6 @@ public class BinaryAccessProviderTest {
@Spy
protected IBinaryStorageSvc myBinaryStorageSvc;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@ -157,7 +156,7 @@ public class BinaryAccessProviderTest {
}
@Test
public void testBinaryAccessRead_WithoutAttachmentId_NullData() {
DocumentReference docRef = new DocumentReference();
DocumentReference.DocumentReferenceContentComponent content = docRef.addContent();
content.getAttachment().setContentType("application/octet-stream");
@ -257,7 +256,7 @@ public class BinaryAccessProviderTest {
when(theServletRequest.getContentLength()).thenReturn(15);
when(myBinaryStorageSvc.shouldStoreBlob(15, docRef.getIdElement(), "Integer")).thenReturn(true);
myRequestDetails.setServletRequest(theServletRequest);
doReturn(sd).when(myBinaryStorageSvc).storeBlob(eq(docRef.getIdElement()), isNull(), eq("Integer"), any(InputStream.class), any(RequestDetails.class));
myRequestDetails.setRequestContents(SOME_BYTES);
try {
@ -266,7 +265,7 @@ public class BinaryAccessProviderTest {
assertEquals(docRef.getId(), outcome.getIdElement().getValue());
} catch (IOException e) {
}
verify(myBinaryStorageSvc, times(1)).storeBlob(any(), any(), any(), any(), any(ServletRequestDetails.class));
}
@Test

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
@ -52,7 +53,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
String contentType = "image/png";
IdType resourceId = new IdType("Binary/123");
StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
myCaptureQueriesListener.logAllQueriesForCurrentThread();
@ -105,7 +106,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
String contentType = "image/png";
IdType resourceId = new IdType("Binary/123");
StoredDetails outcome = mySvc.storeBlob(resourceId, "ABCDEFG", contentType, inputStream);
StoredDetails outcome = mySvc.storeBlob(resourceId, "ABCDEFG", contentType, inputStream, new ServletRequestDetails());
assertEquals("ABCDEFG", outcome.getBlobId());
myCaptureQueriesListener.logAllQueriesForCurrentThread();
@ -163,7 +164,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
String contentType = "image/png";
IdType resourceId = new IdType("Binary/123");
StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
String blobId = outcome.getBlobId();
// Expunge
@ -185,7 +186,7 @@ public class DatabaseBlobBinaryStorageSvcImplTest extends BaseJpaR4Test {
ByteArrayInputStream inputStream = new ByteArrayInputStream(SOME_BYTES);
String contentType = "image/png";
IdType resourceId = new IdType("Binary/123");
StoredDetails outcome = mySvc.storeBlob(resourceId, null, contentType, inputStream, new ServletRequestDetails());
// Right ID
ByteArrayOutputStream capture = new ByteArrayOutputStream();

View File

@ -1,9 +1,12 @@
package ca.uhn.fhir.jpa.binstore;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
import ca.uhn.fhir.rest.server.exceptions.PayloadTooLargeException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.apache.commons.io.FileUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
@ -34,6 +37,8 @@ public class FilesystemBinaryStorageSvcImplTest {
public void before() {
myPath = new File("./target/fstmp");
mySvc = new FilesystemBinaryStorageSvcImpl(myPath.getAbsolutePath());
mySvc.setFhirContextForTests(FhirContext.forR4Cached());
mySvc.setInterceptorBroadcasterForTests(new InterceptorService());
}
@AfterEach
@ -45,7 +50,7 @@ public class FilesystemBinaryStorageSvcImplTest {
public void testStoreAndRetrieve() throws IOException {
IIdType id = new IdType("Patient/123");
String contentType = "image/png";
StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
ourLog.info("Got id: {}", outcome);
@ -68,7 +73,7 @@ public class FilesystemBinaryStorageSvcImplTest {
IIdType id = new IdType("Patient/123");
String contentType = "image/png";
String blobId = "ABCDEFGHIJKLMNOPQRSTUV";
StoredDetails outcome = mySvc.storeBlob(id, blobId, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
assertEquals(blobId, outcome.getBlobId());
ourLog.info("Got id: {}", outcome);
@ -103,7 +108,7 @@ public class FilesystemBinaryStorageSvcImplTest {
public void testExpunge() throws IOException {
IIdType id = new IdType("Patient/123");
String contentType = "image/png";
StoredDetails outcome = mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
ourLog.info("Got id: {}", outcome);
@ -129,7 +134,7 @@ public class FilesystemBinaryStorageSvcImplTest {
IIdType id = new IdType("Patient/123");
String contentType = "image/png";
try {
mySvc.storeBlob(id, null, contentType, new ByteArrayInputStream(SOME_BYTES), new ServletRequestDetails());
fail();
} catch (PayloadTooLargeException e) {
assertEquals(Msg.code(1343) + "Binary size exceeds maximum: 5", e.getMessage());

View File

@ -9,7 +9,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
public class NullBinaryStorageSvcImplTest {
private final NullBinaryStorageSvcImpl mySvc = new NullBinaryStorageSvcImpl();
@Test
public void shouldStoreBlob() {
@ -18,43 +18,31 @@ public class NullBinaryStorageSvcImplTest {
@Test
public void storeBlob() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.storeBlob(null, null, null, null, null));
}
@Test
public void fetchBlobDetails() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlobDetails(null, null));
}
@Test
public void writeBlob() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.writeBlob(null, null, null));
}
@Test
public void expungeBlob() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.expungeBlob(null, null));
}
@Test
public void fetchBlob() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.fetchBlob(null, null));
}
@Test
public void newBlobId() {
assertThrows(UnsupportedOperationException.class, () -> mySvc.newBlobId());
}
}

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.Batch2JobInfo;
import ca.uhn.fhir.jpa.api.model.Batch2JobOperationResult;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
@ -98,6 +99,8 @@ public class BulkDataExportProviderTest {
private final HttpClientExtension myClient = new HttpClientExtension();
@Mock
private IBatch2JobRunner myJobRunner;
@Mock
IFhirResourceDao myFhirResourceDao;
@InjectMocks
private BulkDataExportProvider myProvider;
@RegisterExtension
@ -140,6 +143,8 @@ public class BulkDataExportProviderTest {
myProvider.setStorageSettings(myStorageSettings);
DaoRegistry daoRegistry = mock(DaoRegistry.class);
lenient().when(daoRegistry.getRegisteredDaoTypes()).thenReturn(Set.of("Patient", "Observation", "Encounter"));
lenient().when(daoRegistry.getResourceDao(anyString())).thenReturn(myFhirResourceDao);
myProvider.setDaoRegistry(daoRegistry);
}

View File

@ -19,12 +19,13 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.Sets;
import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;
@ -43,8 +44,12 @@ import org.hl7.fhir.r4.model.Extension;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -81,6 +86,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@ -507,6 +513,9 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
myStorageSettings.setBulkExportFileMaximumCapacity(JpaStorageSettings.DEFAULT_BULK_EXPORT_FILE_MAXIMUM_CAPACITY);
}
// TODO reenable 4637
// Reenable when bulk exports that return no results work as expected
@Disabled
@Test
public void testPatientExportIgnoresResourcesNotInPatientCompartment() {
Patient patient = new Patient();
@ -522,6 +531,7 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
obs2.setId("obs-excluded");
myObservationDao.update(obs2);
// test
HashSet<String> types = Sets.newHashSet("Patient", "Observation");
BulkExportJobResults bulkExportJobResults = startPatientBulkExportJobAndAwaitResults(types, new HashSet<String>(), "ha");
Map<String, List<IBaseResource>> typeToResources = convertJobResultsToResources(bulkExportJobResults);
@ -887,30 +897,163 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
assertThat(typeToContents.get("Observation"), containsString("obs-included"));
assertThat(typeToContents.get("Observation"), not(containsString("obs-excluded")));
}
@Test
public void testGroupBulkExportWithTypeFilter_ReturnsOnlyResourcesInTypeFilter() {
// setup
IParser parser = myFhirContext.newJsonParser();
{
String patientStr = """
{
"resourceType": "Patient",
"id": "f201"
}
""";
Patient patient = parser.parseResource(Patient.class, patientStr);
myClient.update().resource(patient).execute();
}
{
String practitionerStr = """
{
"resourceType": "Practitioner",
"id": "f201"
}
""";
Practitioner practitioner = parser.parseResource(Practitioner.class, practitionerStr);
myClient.update().resource(practitioner).execute();
}
{
String orgString = """
{
"resourceType": "Organization",
"id": "f201"
}
""";
Organization organization = parser.parseResource(Organization.class, orgString);
myClient.update().resource(organization).execute();
}
{
String bundleStr = """
{
"resourceType": "Bundle",
"id": "bundle-transaction",
"meta": {
"lastUpdated": "2021-04-19T20:24:48.194+00:00"
},
"type": "transaction",
"entry": [
{
"fullUrl": "http://example.org/fhir/Encounter/E1",
"resource": {
"resourceType": "Encounter",
"id": "E1",
"subject": {
"reference": "Patient/f201",
"display": "Roel"
},
"participant": [
{
"individual": {
"reference": "Practitioner/f201"
}
}
],
"serviceProvider": {
"reference": "Organization/f201"
}
},
"request": {
"method": "PUT",
"url": "Encounter/E1"
}
},
{
"fullUrl": "http://example.org/fhir/Encounter/E2",
"resource": {
"resourceType": "Encounter",
"id": "E2",
"subject": {
"reference": "Patient/f201",
"display": "Roel"
},
"participant": [
{
"individual": {
"reference": "Practitioner/f201"
}
}
],
"serviceProvider": {
"reference": "Organization/f201"
}
},
"request": {
"method": "PUT",
"url": "Encounter/A2"
}
},
{
"fullUrl": "http://example.org/fhir/Group/G3",
"resource": {
"resourceType": "Group",
"id": "G3",
"text": {
"status": "additional"
},
"type": "person",
"actual": true,
"member": [
{
"entity": {
"reference": "Patient/f201"
},
"period": {
"start": "2021-01-01"
}
},
{
"entity": {
"reference": "Patient/f201"
},
"period": {
"start": "2021-01-01"
}
}
]
},
"request": {
"method": "PUT",
"url": "Group/G3"
}
}
]
}
""";
Bundle bundle = parser.parseResource(Bundle.class, bundleStr);
myClient.transaction().withBundle(bundle).execute();
}
// test
HashSet<String> resourceTypes = Sets.newHashSet("Encounter");
BulkExportJobResults results = startGroupBulkExportJobAndAwaitCompletion(
resourceTypes,
new HashSet<>(),
"G3" // ID from Transaction Bundle
);
Map<String, List<IBaseResource>> stringListMap = convertJobResultsToResources(results);
assertFalse(stringListMap.containsKey("Organization"), String.join(",", stringListMap.keySet()));
assertFalse(stringListMap.containsKey("Patient"), String.join(",", stringListMap.keySet()));
assertTrue(stringListMap.containsKey("Encounter"), String.join(",", stringListMap.keySet()));
assertThat(stringListMap.get("Encounter"), hasSize(2));
}
@Test
public void testGroupBulkExportWithTypeFilter() {
// Create some resources
Group g = createGroupWithPatients();
String groupId = g.getIdPart();
//Create an observation for each patient
Observation femaleObs = new Observation();
@ -923,9 +1066,11 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
maleObs.setId("obs-male");
myClient.update().resource(maleObs).execute();
// test
HashSet<String> resourceTypes = Sets.newHashSet("Observation", "Patient");
HashSet<String> filters = Sets.newHashSet("Patient?gender=female");
BulkExportJobResults results = startGroupBulkExportJobAndAwaitCompletion(resourceTypes, filters, "G");
BulkExportJobResults results = startGroupBulkExportJobAndAwaitCompletion(resourceTypes, filters, groupId);
Map<String, List<IBaseResource>> stringListMap = convertJobResultsToResources(results);
assertThat(stringListMap.get("Observation"), hasSize(1));
assertThat(stringListMap.get("Patient"), hasSize(1));
@ -978,10 +1123,8 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
coverage.setId("coverage-female");
myClient.update().resource(coverage).execute();
HashSet<String> filters = Sets.newHashSet();
BulkExportJobResults results = startGroupBulkExportJobAndAwaitCompletion(new HashSet<>(), filters, "G");
Map<String, List<IBaseResource>> typeToResource = convertJobResultsToResources(results);
assertThat(typeToResource.keySet(), hasSize(4));
assertThat(typeToResource.get("Group"), hasSize(1));
@ -1053,7 +1196,6 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
coverage.setId("coverage-included");
myClient.update().resource(coverage).execute();
HashSet<String> resourceTypes = Sets.newHashSet("Observation", "Coverage");
BulkExportJobResults bulkExportJobResults = startGroupBulkExportJobAndAwaitCompletion(resourceTypes, new HashSet<>(), "G2");
@ -1159,6 +1301,29 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
private Group createGroupWithPatients() {
Patient patient = new Patient();
patient.setId("PF");
patient.setGender(Enumerations.AdministrativeGender.FEMALE);
patient.setActive(true);
myClient.update().resource(patient).execute();
patient = new Patient();
patient.setId("PM");
patient.setGender(Enumerations.AdministrativeGender.MALE);
patient.setActive(true);
myClient.update().resource(patient).execute();
Group group = new Group();
group.setId("Group/G");
group.setActive(true);
group.addMember().getEntity().setReference("Patient/PF");
group.addMember().getEntity().setReference("Patient/PM");
myClient.update().resource(group).execute();
return group;
}
private Map<String, String> convertJobResultsToStringContents(BulkExportJobResults theResults) {
Map<String, String> typeToResources = new HashMap<>();
for (Map.Entry<String, List<String>> entry : theResults.getResourceTypeToBinaryIds().entrySet()) {
@ -1206,29 +1371,91 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
}
BulkExportJobResults startBulkExportJobAndAwaitCompletion(
BulkDataExportOptions.ExportStyle theExportStyle,
Set<String> theResourceTypes,
Set<String> theFilters,
String theGroupOrPatientId
) {
Parameters parameters = new Parameters();
parameters.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, Constants.CT_FHIR_NDJSON);
if (theFilters != null && !theFilters.isEmpty()) {
for (String typeFilter : theFilters) {
parameters.addParameter(
JpaConstants.PARAM_EXPORT_TYPE_FILTER,
typeFilter
);
}
}
if (theResourceTypes != null && !theResourceTypes.isEmpty()) {
parameters.addParameter(
JpaConstants.PARAM_EXPORT_TYPE,
String.join(",", theResourceTypes)
);
}
MethodOutcome outcome;
if (theExportStyle == BulkDataExportOptions.ExportStyle.GROUP) {
options.setGroupId(new IdType("Group", theGroupOrPatientId));
}
if (theExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && theGroupOrPatientId != null) {
outcome = myClient
.operation()
.onInstance("Group/" + theGroupOrPatientId)
.named(JpaConstants.OPERATION_EXPORT)
.withParameters(parameters)
.returnMethodOutcome()
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
.execute();
} else if (theExportStyle == BulkDataExportOptions.ExportStyle.PATIENT && theGroupOrPatientId != null) {
outcome = myClient
.operation()
.onInstance("Patient/" + theGroupOrPatientId)
.named(JpaConstants.OPERATION_EXPORT)
.withParameters(parameters)
.returnMethodOutcome()
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
.execute();
} else {
// system request
outcome = myClient
.operation()
.onServer()
.named(JpaConstants.OPERATION_EXPORT)
.withParameters(parameters)
.returnMethodOutcome()
.withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
.execute();
}
assertNotNull(outcome);
assertEquals(202, outcome.getResponseStatusCode());
String pollLocation = null;
for (String header : outcome.getResponseHeaders().keySet()) {
// headers are in lowercase
// constants are in Pascal Case
// :(
if (header.equalsIgnoreCase(Constants.HEADER_CONTENT_LOCATION)) {
pollLocation = outcome.getResponseHeaders().get(header).get(0);
break;
}
}
assertNotNull(pollLocation);
UrlUtil.UrlParts parts = UrlUtil.parseUrl(pollLocation);
assertTrue(isNotBlank(parts.getParams()));
Map<String, String[]> queryParams = UrlUtil.parseQueryString(parts.getParams());
assertTrue(queryParams.containsKey(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID));
String jobInstanceId = queryParams.get(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID)[0];
assertNotNull(jobInstanceId);
myBatch2JobHelper.awaitJobCompletion(jobInstanceId, 60);
await().atMost(300, TimeUnit.SECONDS).until(() -> myJobRunner.getJobInfo(jobInstanceId).getReport() != null);
String report = myJobRunner.getJobInfo(jobInstanceId).getReport();
BulkExportJobResults results = JsonUtil.deserialize(report, BulkExportJobResults.class);
return results;
}

View File

@ -39,7 +39,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
@BeforeEach
@Override
public void before() throws Exception {
super.before();
myPartitionSettings.setPartitioningEnabled(false);
myStorageSettings.setInlineResourceTextBelowSize(10000);

View File

@ -23,13 +23,11 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.springframework.beans.factory.annotation.Autowired;
import java.time.LocalDate;
import java.time.Month;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ -75,8 +73,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
myStorageSettings.setMatchUrlCacheEnabled(new JpaStorageSettings().getMatchUrlCache());
}
@Override
@BeforeEach
public void before() throws Exception {
super.before();
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setIncludePartitionInSearchHashes(new PartitionSettings().isIncludePartitionInSearchHashes());
@ -183,7 +183,7 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
when(mySrd.getRequestId()).thenReturn("REQUEST_ID");
}
protected ICreationArgument withPartition(Integer thePartitionId) {
return t -> {
if (thePartitionId != null) {
addCreatePartition(thePartitionId, null);

View File

@@ -14,6 +14,7 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.entity.TermValueSet;
@@ -100,6 +101,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -176,6 +178,61 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myValidationSupport.fetchAllStructureDefinitions();
}
/**
* See the class javadoc before changing the counts in this test!
*/
@Test
public void testExpungeAllVersionsWithTagsDeletesRow() {
// Setup
// Create then delete
for (int i = 0; i < 5; i++) {
Patient p = new Patient();
p.setId("TEST" + i);
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
p.addName().setFamily("FOO");
myPatientDao.update(p).getId();
for (int j = 0; j < 5; j++) {
p.setActive(!p.getActive());
myPatientDao.update(p);
}
myPatientDao.delete(new IdType("Patient/TEST" + i));
}
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), not(empty())));
logAllResources();
// Test
myCaptureQueriesListener.clear();
myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true), null);
// Verify
/*
* Note: $expunge is still pretty inefficient. We load all the HFJ_RESOURCE entities
* in one shot, but we then load HFJ_RES_VER entities one by one and delete the FK
* constraints on both HFJ_RESOURCE and HFJ_RES_VER one by one. This could definitely
* stand to be optimized. The one gotcha is that we call an interceptor for each
* version being deleted (I think so that MDM can do cleanup?) so we need to be careful
* about any batch deletes.
*/
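// Hypothetical illustration of the shape described above (all names are
// invented for this sketch; this is not HAPI's actual expunge service code):
//
//   List<Long> versionPids = historyDao.findVersionPidsForResource(resourcePid); // one bulk SELECT
//   for (Long versionPid : versionPids) {
//       ResourceHistoryTable version = historyDao.findById(versionPid).orElseThrow(); // one SELECT per version
//       interceptors.callHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, version); // per-version hook (e.g. MDM cleanup)
//       historyDao.delete(version); // one DELETE per version
//   }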
assertEquals(47, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(85, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));
}
/**
* See the class javadoc before changing the counts in this test!
*/
@@ -953,14 +1010,14 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@ParameterizedTest
@CsvSource({
// NoOp OptimisticLock OptimizeMode ExpectedSelect ExpectedUpdate
" false, false, CURRENT_VERSION, 2, 10",
" false, false, CURRENT_VERSION, 2, 1",
" true, false, CURRENT_VERSION, 2, 0",
" false, true, CURRENT_VERSION, 12, 10",
" false, true, CURRENT_VERSION, 12, 1",
" true, true, CURRENT_VERSION, 12, 0",
" false, false, ALL_VERSIONS, 22, 20",
" true, false, ALL_VERSIONS, 22, 0",
" false, true, ALL_VERSIONS, 32, 20",
" true, true, ALL_VERSIONS, 32, 0",
" false, false, ALL_VERSIONS, 12, 10",
" true, false, ALL_VERSIONS, 12, 0",
" false, true, ALL_VERSIONS, 22, 10",
" true, true, ALL_VERSIONS, 22, 0",
})
public void testReindexJob_OptimizeStorage(boolean theNoOp, boolean theOptimisticLock, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageModeEnum, int theExpectedSelectCount, int theExpectedUpdateCount) {
// Setup
@@ -998,7 +1055,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
RunOutcome outcome = myReindexStep.doReindex(data, mock(IJobDataSink.class), "123", "456", params);
// validate
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(theExpectedSelectCount, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(theExpectedUpdateCount, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());

View File

@@ -6,11 +6,15 @@ import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hamcrest.collection.IsIterableContainingInAnyOrder;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.Enumerations;
@@ -22,7 +26,11 @@ import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.springframework.transaction.support.TransactionTemplate;
import java.sql.Date;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
@@ -263,4 +271,43 @@ public class FhirResourceDaoR4SearchIncludeTest extends BaseJpaR4Test {
myCarePlanDao.update(carePlan);
}
}
/**
* https://github.com/hapifhir/hapi-fhir/issues/4896
*/
@Test
void testLastUpdatedDoesNotApplyToForwardOrRevIncludes() {
// given
Instant now = Instant.now();
IIdType org = createOrganization();
IIdType patId = createPatient(withReference("managingOrganization", org));
IIdType groupId = createGroup(withGroupMember(patId));
IIdType careTeam = createResource("CareTeam", withSubject(patId));
// backdate the Group and CareTeam
int updatedCount = new TransactionTemplate(myTxManager).execute((status)->
myEntityManager
.createQuery("update ResourceTable set myUpdated = :new_updated where myId in (:target_ids)")
.setParameter("new_updated", Date.from(now.minus(1, ChronoUnit.HOURS)))
.setParameter("target_ids", List.of(groupId.getIdPartAsLong(), careTeam.getIdPartAsLong(), org.getIdPartAsLong()))
.executeUpdate());
assertEquals(3, updatedCount, "backdated the Organization, CareTeam and Group");
// when
// "Patient?_lastUpdated=gt2023-01-01&_revinclude=Group:member&_revinclude=CareTeam:subject&_include=Patient:organization");
SearchParameterMap map = new SearchParameterMap();
map.setLastUpdated(new DateRangeParam(new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, Date.from(now))));
map.addInclude(new Include("Patient:organization"));
map.addRevInclude(new Include("Group:member"));
map.addRevInclude(new Include("CareTeam:subject"));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
List<String> ids = toUnqualifiedVersionlessIdValues(outcome);
// then
assertThat(ids, Matchers.containsInAnyOrder(patId.getValue(), groupId.getValue(), careTeam.getValue(), org.getValue()));
}
}

View File

@@ -12,7 +12,7 @@ import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.storage.test.BaseDateSearchDaoTests;
import ca.uhn.fhir.storage.test.DaoTestDataBuilder;
-import org.hl7.fhir.instance.model.api.IBaseResource;
+import ca.uhn.fhir.test.utilities.ITestDataBuilder.ICreationArgument;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.junit.jupiter.api.Disabled;
@@ -30,7 +30,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import java.util.List;
import java.util.function.Consumer;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
@@ -194,7 +193,7 @@ public class FhirResourceDaoR4StandardQueriesNoFTTest extends BaseJpaTest {
}
@SafeVarargs
-private IIdType withObservation(Consumer<IBaseResource>... theBuilder) {
+private IIdType withObservation(ICreationArgument... theBuilder) {
myObservationId = myDataBuilder.createObservation(theBuilder);
return myObservationId;
}
@@ -270,7 +269,7 @@ public class FhirResourceDaoR4StandardQueriesNoFTTest extends BaseJpaTest {
IIdType myResourceId;
private IIdType withRiskAssessmentWithProbabilty(double theValue) {
myResourceId = myDataBuilder.createResource("RiskAssessment", myDataBuilder.withPrimitiveAttribute("prediction.probabilityDecimal", theValue));
myResourceId = myDataBuilder.createResource("RiskAssessment", myDataBuilder.withResourcePrimitiveAttribute("prediction.probabilityDecimal", theValue));
return myResourceId;
}

Some files were not shown because too many files have changed in this diff.