6486 implement patient merge (#6568)
* begin with failing test
* wip merge operation provider
* wip code resolve references and some refactoring
* respect status code set in providers
* more validation and code to update resources after refs are updated
* added source-delete parameter and some validation tests on result-patient
* update test
* update test
* return target patient as output parameter, do not add replacedby link to target if src is to be deleted
* output definition
* ignore identifiers in ref, more unit tests, return target in preview mode
* output definition
* return input parameters in the merge operation output
* copy identifiers from source to target (no duplicate check)
* add shared identifier
* add shared identifier
* moar asserts
* moar asserts
* fixing result patient
* fixing result patient
* fixing result patient
* add duplicate check when copying source identifiers to target
* copy result-patient before passing it to service
* add preview to test
* update preview asserts
* move IReplaceReferencesSvc to storage-package, and call replaceReference in patient merge
* moar tests
* cleanup
* add task test
* add replace references test
* kebab-case replace references
* add sync -> async switch
* test
* test
* start migrating to stream
* validate result-patient does not have link to source if delete-source is true
* add update count to msg in preview mode
* spotless
* change page size -> batch size and create new default max in storage settings
* spotless
* switched to patch transaction
* switched to patch transaction
* switched to patch transaction
* switched to patch transaction
* strip version from returned task
* spotless
* spotless
* async replace references test passes
* align tests
* test small batch size
* do src and target updates in trx, add validation src is not already replaced
* start building batch 2 improve dao
* start building batch 2 improve dao
* start building batch 2 improve dao
* move test setup out to helper
* move replace references tests out
* switch helper to use dao
* increased unit test coverage
* build out batch
* build out batch
* update test to add link to result-patient only if the source is not to be deleted
* batch passes test
* batch passes test
* batch passes test
* made patient-merge return the Task returned from replace references
* batch passes test
* batch passes test
* consolidate async and sync supporting methods into a single storage service
* consolidate async and sync supporting methods into a single storage service
* add merge batch job
* add merge batch job
* added FIXMES for ED
* added FIXMES for ED
* update test to not validate task in preview mode
* mark identifier copied from source to target as old
* copyright headers
* runMaintenancePass before checking merge task status, and renamed test method for merge
* fix test
* fix test
* default
* invoke async merge job in merge service
* starting work on new docs
* starting work on new docs
* document $replace-references
* separate default from max
* fixme
* moar fixme
* moar fixme
* ken last fixme
* return 202 status on async merge, update MergeBatchTest to do more validations
* async success message, inject mergeservice, add async merge unit tests
* make validatePatchResultBundle static again
* added test for adding reference while merge in progress, and merge error handler
* fixed
* fixed
* added error handler to replace references job
* review
* javadoc
* organize imports
* make ValidationResult immutable
* make ValidationResult immutable
* rename ReplaceReferencesRequest
* rename ReplaceReferencesRequest
* rename MergeResourceHelper
* rename MergeResourceHelper
* javadoc
* split merge validation service out from merge service
* split merge validation service out from merge service
* split merge validation service out from merge service
* review feedback
* review feedback
* rename param constants
* rename param constants
* remove IdentifierUtil for visibility
* remove IdentifierUtil for visibility
* final review of hapi side
* review feedback
* review feedback
* fix checkstyle errors, rename MergeOperationInputParameters and add Msg.code
* fix replace reference parameter type, and some copyright headers
* review feedback
* review feedback
* review feedback
* review feedback
* review feedback
* review feedback
* review feedback
* review feedback
* move $merge into JPA R4
* move $merge into JPA R4
* still need to return 412
* still need to return 412
* return 412
* moar tests
* moar tests
* fix async batch size
* fix async batch size
* fix async batch size
* exception code
* exception code
* fix bean wiring
* fix bean wiring
* fix test
* fix test
* fix test
* fix test
* fix test
* bump pom versions

---------

Co-authored-by: Ken Stevens <ken@smiledigitalhealth.com>
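Before the file-by-file diff, a quick orientation on what the new operation looks like from a client's point of view. The fragment below is only an illustrative sketch: the base URL and patient ids are placeholders, and the parameter names (source-patient, target-patient, preview) come from the FHIR Patient $merge specification that this PR implements rather than from constants in this diff.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;

FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

Parameters inParams = new Parameters();
inParams.addParameter().setName("source-patient").setValue(new Reference("Patient/pat-src"));
inParams.addParameter().setName("target-patient").setValue(new Reference("Patient/pat-tgt"));
inParams.addParameter().setName("preview").setValue(new BooleanType(true)); // dry run: report what would change

// Type-level invocation: POST [base]/Patient/$merge
Parameters outParams = client.operation()
        .onType(Patient.class)
        .named("$merge")
        .withParameters(inParams)
        .execute();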
parent 3594770bf6
commit 4feb489735
@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -265,30 +265,48 @@ public class OperationOutcomeUtil {
    }

    public static void addDetailsToIssue(FhirContext theFhirContext, IBase theIssue, String theSystem, String theCode) {
        addDetailsToIssue(theFhirContext, theIssue, theSystem, theCode, null);
    }

    public static void addDetailsToIssue(
            FhirContext theFhirContext, IBase theIssue, String theSystem, String theCode, String theText) {
        BaseRuntimeElementCompositeDefinition<?> issueElement =
                (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition(theIssue.getClass());
        BaseRuntimeChildDefinition detailsChildDef = issueElement.getChildByName("details");

        BaseRuntimeElementCompositeDefinition<?> codingDef =
                (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("Coding");
        ICompositeType coding = (ICompositeType) codingDef.newInstance();

        // System
        IPrimitiveType<?> system =
                (IPrimitiveType<?>) theFhirContext.getElementDefinition("uri").newInstance();
        system.setValueAsString(theSystem);
        codingDef.getChildByName("system").getMutator().addValue(coding, system);

        // Code
        IPrimitiveType<?> code =
                (IPrimitiveType<?>) theFhirContext.getElementDefinition("code").newInstance();
        code.setValueAsString(theCode);
        codingDef.getChildByName("code").getMutator().addValue(coding, code);
        BaseRuntimeElementCompositeDefinition<?> ccDef =
                (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("CodeableConcept");

        ICompositeType codeableConcept = (ICompositeType) ccDef.newInstance();
        ccDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);

        if (isNotBlank(theSystem) || isNotBlank(theCode)) {
            BaseRuntimeElementCompositeDefinition<?> codingDef =
                    (BaseRuntimeElementCompositeDefinition<?>) theFhirContext.getElementDefinition("Coding");
            ICompositeType coding = (ICompositeType) codingDef.newInstance();

            // System
            if (isNotBlank(theSystem)) {
                IPrimitiveType<?> system = (IPrimitiveType<?>)
                        theFhirContext.getElementDefinition("uri").newInstance();
                system.setValueAsString(theSystem);
                codingDef.getChildByName("system").getMutator().addValue(coding, system);
            }

            // Code
            if (isNotBlank(theCode)) {
                IPrimitiveType<?> code = (IPrimitiveType<?>)
                        theFhirContext.getElementDefinition("code").newInstance();
                code.setValueAsString(theCode);
                codingDef.getChildByName("code").getMutator().addValue(coding, code);
            }

            ccDef.getChildByName("coding").getMutator().addValue(codeableConcept, coding);
        }

        if (isNotBlank(theText)) {
            IPrimitiveType<?> textElem = (IPrimitiveType<?>)
                    ccDef.getChildByName("text").getChildByName("text").newInstance(theText);
            ccDef.getChildByName("text").getMutator().addValue(codeableConcept, textElem);
        }

        detailsChildDef.getMutator().addValue(theIssue, codeableConcept);
    }
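A minimal sketch of how the new five-argument overload might be called to attach both a coded detail and human-readable text to an issue; the system, code and text values here are purely illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.hl7.fhir.r4.model.OperationOutcome;

FhirContext ctx = FhirContext.forR4();
OperationOutcome oo = new OperationOutcome();
OperationOutcome.OperationOutcomeIssueComponent issue = oo.addIssue();
issue.setSeverity(OperationOutcome.IssueSeverity.INFORMATION);

// New five-argument overload: system + code populate issue.details.coding,
// and the trailing text populates issue.details.text.
OperationOutcomeUtil.addDetailsToIssue(
        ctx, issue, "http://example.org/codes", "MERGE-PREVIEW", "Preview only, no changes were made");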
@ -0,0 +1,69 @@
|
|||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR - Core Library
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.util;
|
||||
|
||||
import jakarta.annotation.Nonnull;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* This class collects items from a stream to a given limit and know whether there are
|
||||
* still more items beyond that limit.
|
||||
*
|
||||
* @param <T> the type of object being streamed
|
||||
*/
|
||||
public class StopLimitAccumulator<T> {
|
||||
private final boolean isTruncated;
|
||||
private final List<T> myList;
|
||||
|
||||
private StopLimitAccumulator(List<T> theList, boolean theIsTruncated) {
|
||||
myList = Collections.unmodifiableList(theList);
|
||||
isTruncated = theIsTruncated;
|
||||
}
|
||||
|
||||
public static <T> StopLimitAccumulator<T> fromStreamAndLimit(@Nonnull Stream<T> theItemStream, long theLimit) {
|
||||
assert theLimit > 0;
|
||||
AtomicBoolean isBeyondLimit = new AtomicBoolean(false);
|
||||
List<T> accumulator = new ArrayList<>();
|
||||
|
||||
theItemStream
|
||||
.limit(theLimit + 1) // Fetch one extra item to see if there are any more items past our limit
|
||||
.forEach(item -> {
|
||||
if (accumulator.size() < theLimit) {
|
||||
accumulator.add(item);
|
||||
} else {
|
||||
isBeyondLimit.set(true);
|
||||
}
|
||||
});
|
||||
return new StopLimitAccumulator<>(accumulator, isBeyondLimit.get());
|
||||
}
|
||||
|
||||
public boolean isTruncated() {
|
||||
return isTruncated;
|
||||
}
|
||||
|
||||
public List<T> getItemList() {
|
||||
return myList;
|
||||
}
|
||||
}
|
|
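This accumulator is what later lets the replace-references service decide between finishing synchronously and handing off to a batch job: it drains at most limit + 1 elements and remembers whether anything was left over. An illustrative fragment, assuming arbitrary placeholder ids:

import ca.uhn.fhir.util.StopLimitAccumulator;
import java.util.stream.Stream;

Stream<Long> candidateIds = Stream.of(1L, 2L, 3L, 4L, 5L, 6L); // placeholder ids
StopLimitAccumulator<Long> accumulator = StopLimitAccumulator.fromStreamAndLimit(candidateIds, 5);

if (accumulator.isTruncated()) {
    // More than 5 matches: too big to process inline, so the caller switches to the async path
} else {
    accumulator.getItemList().forEach(id -> {
        // Process each collected id synchronously
    });
}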
@ -0,0 +1,69 @@
|
|||
package ca.uhn.fhir.util;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
class StopLimitAccumulatorTest {
|
||||
|
||||
@Test
|
||||
void testFromStreamAndLimit_withNoTruncation() {
|
||||
// setup
|
||||
Stream<Integer> stream = Stream.of(1, 2, 3, 4, 5);
|
||||
int limit = 5;
|
||||
|
||||
// execute
|
||||
StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
|
||||
|
||||
// verify
|
||||
assertFalse(accumulator.isTruncated(), "The result should not be truncated");
|
||||
assertEquals(List.of(1, 2, 3, 4, 5), accumulator.getItemList(), "The list should contain all items within the limit");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFromStreamAndLimit_withTruncation() {
|
||||
// setup
|
||||
Stream<Integer> stream = Stream.of(1, 2, 3, 4, 5, 6, 7);
|
||||
int limit = 5;
|
||||
|
||||
// execute
|
||||
StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
|
||||
|
||||
// verify
|
||||
assertTrue(accumulator.isTruncated(), "The result should be truncated");
|
||||
assertEquals(List.of(1, 2, 3, 4, 5), accumulator.getItemList(), "The list should contain only the items within the limit");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFromStreamAndLimit_withEmptyStream() {
|
||||
// setup
|
||||
Stream<Integer> stream = Stream.empty();
|
||||
int limit = 5;
|
||||
|
||||
// execute
|
||||
StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
|
||||
|
||||
// verify
|
||||
assertFalse(accumulator.isTruncated(), "The result should not be truncated for an empty stream");
|
||||
assertTrue(accumulator.getItemList().isEmpty(), "The list should be empty");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testImmutabilityOfItemList() {
|
||||
// setup
|
||||
Stream<Integer> stream = Stream.of(1, 2, 3);
|
||||
int limit = 3;
|
||||
|
||||
StopLimitAccumulator<Integer> accumulator = StopLimitAccumulator.fromStreamAndLimit(stream, limit);
|
||||
|
||||
// execute and Assert
|
||||
List<Integer> itemList = accumulator.getItemList();
|
||||
assertThrows(UnsupportedOperationException.class, () -> itemList.add(4), "The list should be immutable");
|
||||
}
|
||||
}
|
|
@@ -4,7 +4,7 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>7.7.15-SNAPSHOT</version>
+<version>7.7.16-SNAPSHOT</version>

 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -11,7 +11,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>7.7.15-SNAPSHOT</version>
+    <version>7.7.16-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -31,11 +31,13 @@ import ca.uhn.fhir.jpa.api.pid.StreamTemplate;
import ca.uhn.fhir.jpa.api.pid.TypedResourcePid;
import ca.uhn.fhir.jpa.api.pid.TypedResourceStream;
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;

@@ -46,6 +48,7 @@ import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.Date;
import java.util.function.Supplier;

@@ -56,6 +59,8 @@ public class Batch2DaoSvcImpl implements IBatch2DaoSvc {

    private final IResourceTableDao myResourceTableDao;

    private final IResourceLinkDao myResourceLinkDao;

    private final MatchUrlService myMatchUrlService;

    private final DaoRegistry myDaoRegistry;

@@ -71,11 +76,13 @@
    public Batch2DaoSvcImpl(
            IResourceTableDao theResourceTableDao,
            IResourceLinkDao theResourceLinkDao,
            MatchUrlService theMatchUrlService,
            DaoRegistry theDaoRegistry,
            FhirContext theFhirContext,
            IHapiTransactionService theTransactionService) {
        myResourceTableDao = theResourceTableDao;
        myResourceLinkDao = theResourceLinkDao;
        myMatchUrlService = theMatchUrlService;
        myDaoRegistry = theDaoRegistry;
        myFhirContext = theFhirContext;

@@ -95,6 +102,11 @@
        }
    }

    @Override
    public Stream<IdDt> streamSourceIdsThatReferenceTargetId(IIdType theTargetId) {
        return myResourceLinkDao.streamSourceIdsForTargetFhirId(theTargetId.getResourceType(), theTargetId.getIdPart());
    }

    private Stream<TypedResourcePid> streamResourceIdsWithUrl(
            Date theStart, Date theEnd, String theUrl, RequestPartitionId theRequestPartitionId) {
        validateUrl(theUrl);
@@ -44,12 +44,18 @@ public class Batch2SupportConfig {
    @Bean
    public IBatch2DaoSvc batch2DaoSvc(
            IResourceTableDao theResourceTableDao,
            IResourceLinkDao theResourceLinkDao,
            MatchUrlService theMatchUrlService,
            DaoRegistry theDaoRegistry,
            FhirContext theFhirContext,
            IHapiTransactionService theTransactionService) {
        return new Batch2DaoSvcImpl(
                theResourceTableDao, theMatchUrlService, theDaoRegistry, theFhirContext, theTransactionService);
                theResourceTableDao,
                theResourceLinkDao,
                theMatchUrlService,
                theDaoRegistry,
                theFhirContext,
                theTransactionService);
    }

    @Bean
@@ -19,9 +19,11 @@
 */
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl;
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.support.IValidationSupport;

@@ -55,6 +57,7 @@ import ca.uhn.fhir.jpa.dao.MatchResourceUrlService;
import ca.uhn.fhir.jpa.dao.ResourceHistoryCalculator;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;

@@ -175,6 +178,7 @@ import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

@@ -930,7 +934,31 @@ public class JpaConfig {
    }

    @Bean
    public IReplaceReferencesSvc replaceReferencesSvc(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
        return new ReplaceReferencesSvcImpl(theFhirContext, theDaoRegistry);
    public Batch2TaskHelper batch2TaskHelper() {
        return new Batch2TaskHelper();
    }

    @Bean
    public IReplaceReferencesSvc replaceReferencesSvc(
            DaoRegistry theDaoRegistry,
            HapiTransactionService theHapiTransactionService,
            IResourceLinkDao theResourceLinkDao,
            IJobCoordinator theJobCoordinator,
            ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundle,
            Batch2TaskHelper theBatch2TaskHelper,
            JpaStorageSettings theStorageSettings) {
        return new ReplaceReferencesSvcImpl(
                theDaoRegistry,
                theHapiTransactionService,
                theResourceLinkDao,
                theJobCoordinator,
                theReplaceReferencesPatchBundle,
                theBatch2TaskHelper,
                theStorageSettings);
    }

    @Bean
    public ReplaceReferencesPatchBundleSvc replaceReferencesPatchBundleSvc(DaoRegistry theDaoRegistry) {
        return new ReplaceReferencesPatchBundleSvc(theDaoRegistry);
    }
}
@@ -19,17 +19,26 @@
 */
package ca.uhn.fhir.jpa.config.r4;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.IDaoRegistry;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.config.GeneratedDaoAndResourceProviderConfigR4;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ITransactionProcessorVersionAdapter;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.graphql.GraphQLProviderWithIntrospection;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.provider.IReplaceReferencesSvc;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
import ca.uhn.fhir.jpa.provider.merge.PatientMergeProvider;
import ca.uhn.fhir.jpa.provider.merge.ResourceMergeService;
import ca.uhn.fhir.jpa.term.TermLoaderSvcImpl;
import ca.uhn.fhir.jpa.term.TermVersionAdapterSvcR4;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;

@@ -96,4 +105,30 @@ public class JpaR4Config {
            ITermDeferredStorageSvc theDeferredStorageSvc, ITermCodeSystemStorageSvc theCodeSystemStorageSvc) {
        return new TermLoaderSvcImpl(theDeferredStorageSvc, theCodeSystemStorageSvc);
    }

    @Bean
    public ResourceMergeService resourceMergeService(
            DaoRegistry theDaoRegistry,
            IReplaceReferencesSvc theReplaceReferencesSvc,
            HapiTransactionService theHapiTransactionService,
            IRequestPartitionHelperSvc theRequestPartitionHelperSvc,
            IJobCoordinator theJobCoordinator,
            Batch2TaskHelper theBatch2TaskHelper,
            JpaStorageSettings theStorageSettings) {

        return new ResourceMergeService(
                theStorageSettings,
                theDaoRegistry,
                theReplaceReferencesSvc,
                theHapiTransactionService,
                theRequestPartitionHelperSvc,
                theJobCoordinator,
                theBatch2TaskHelper);
    }

    @Bean
    public PatientMergeProvider patientMergeProvider(
            FhirContext theFhirContext, DaoRegistry theDaoRegistry, ResourceMergeService theResourceMergeService) {
        return new PatientMergeProvider(theFhirContext, theDaoRegistry, theResourceMergeService);
    }
}
@@ -21,12 +21,14 @@ package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.model.primitive.IdDt;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import java.util.List;
import java.util.stream.Stream;

public interface IResourceLinkDao extends JpaRepository<ResourceLink, Long>, IHapiFhirJpaRepository {

@@ -46,4 +48,30 @@ public interface IResourceLinkDao extends JpaRepository<ResourceLink, Long>, IHapiFhirJpaRepository {
     */
    @Query("SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr WHERE t.myId in :pids")
    List<ResourceLink> findByPidAndFetchTargetDetails(@Param("pids") List<Long> thePids);

    /**
     * Stream Resource Ids of all resources that have a reference to the provided resource id
     *
     * @param theTargetResourceType the resource type part of the id
     * @param theTargetResourceFhirId the value part of the id
     * @return
     */
    @Query(
            "SELECT DISTINCT new ca.uhn.fhir.model.primitive.IdDt(t.mySourceResourceType, t.mySourceResource.myFhirId) FROM ResourceLink t WHERE t.myTargetResourceType = :resourceType AND t.myTargetResource.myFhirId = :resourceFhirId")
    Stream<IdDt> streamSourceIdsForTargetFhirId(
            @Param("resourceType") String theTargetResourceType,
            @Param("resourceFhirId") String theTargetResourceFhirId);

    /**
     * Count the number of resources that have a reference to the provided resource id
     *
     * @param theTargetResourceType the resource type part of the id
     * @param theTargetResourceFhirId the value part of the id
     * @return
     */
    @Query(
            "SELECT COUNT(DISTINCT t.mySourceResourcePid) FROM ResourceLink t WHERE t.myTargetResourceType = :resourceType AND t.myTargetResource.myFhirId = :resourceFhirId")
    Integer countResourcesTargetingFhirTypeAndFhirId(
            @Param("resourceType") String theTargetResourceType,
            @Param("resourceFhirId") String theTargetResourceFhirId);
}
@@ -19,6 +19,7 @@
 */
package ca.uhn.fhir.jpa.provider;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters;
import ca.uhn.fhir.jpa.model.util.JpaConstants;

@@ -42,6 +43,7 @@ import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Arrays;
import java.util.List;

@@ -50,6 +52,8 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;

public abstract class BaseJpaResourceProviderPatient<T extends IBaseResource> extends BaseJpaResourceProvider<T> {

    @Autowired
    private FhirContext myFhirContext;

    /**
     * Patient/123/$everything
     */
@@ -19,13 +19,25 @@
 */
package ca.uhn.fhir.jpa.provider;

import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IIdType;

/**
 * Contract for service which replaces references
 * Find all references to a source resource and replace them with references to the provided target
 */
public interface IReplaceReferencesSvc {

    IBaseParameters replaceReferences(String theSourceRefId, String theTargetRefId, RequestDetails theRequest);
    /**
     * Find all references to a source resource and replace them with references to the provided target
     */
    IBaseParameters replaceReferences(
            ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails);

    /**
     * To support $merge preview mode, provide a count of how many references would be updated if replaceReferences
     * was called
     */
    Integer countResourcesReferencingResource(IIdType theResourceId, RequestDetails theRequestDetails);
}
@@ -19,30 +19,46 @@
 */
package ca.uhn.fhir.jpa.provider;

import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.Transaction;
import ca.uhn.fhir.rest.annotation.TransactionParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.ParametersUtil;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;

import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static software.amazon.awssdk.utils.StringUtils.isBlank;

public final class JpaSystemProvider<T, MT> extends BaseJpaSystemProvider<T, MT> {
    @Autowired
    private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;

    @Description(
            "Marks all currently existing resources of a given type, or all resources of all types, for reindexing.")

@@ -145,13 +161,59 @@ public final class JpaSystemProvider<T, MT> extends BaseJpaSystemProvider<T, MT>
    @Operation(name = ProviderConstants.OPERATION_REPLACE_REFERENCES, global = true)
    @Description(
            value =
                    "This operation searches for all references matching the provided id and updates them to references to the provided newReferenceTargetId.",
                    "This operation searches for all references matching the provided id and updates them to references to the provided target-reference-id.",
            shortDefinition = "Repoints referencing resources to another resources instance")
    public IBaseParameters replaceReferences(
            @OperationParam(name = ProviderConstants.PARAM_SOURCE_REFERENCE_ID) String theSourceId,
            @OperationParam(name = ProviderConstants.PARAM_TARGET_REFERENCE_ID) String theTargetId,
            RequestDetails theRequest) {
            @OperationParam(
                            name = ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID,
                            min = 1,
                            typeName = "string")
                    IPrimitiveType<String> theSourceId,
            @OperationParam(
                            name = ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID,
                            min = 1,
                            typeName = "string")
                    IPrimitiveType<String> theTargetId,
            @OperationParam(
                            name = ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT,
                            typeName = "unsignedInt")
                    IPrimitiveType<Integer> theResourceLimit,
            ServletRequestDetails theServletRequest) {
        startRequest(theServletRequest);

        return getReplaceReferencesSvc().replaceReferences(theSourceId, theTargetId, theRequest);
        try {
            validateReplaceReferencesParams(theSourceId.getValue(), theTargetId.getValue());

            int resourceLimit = MergeResourceHelper.setResourceLimitFromParameter(myStorageSettings, theResourceLimit);

            IdDt sourceId = new IdDt(theSourceId.getValue());
            IdDt targetId = new IdDt(theTargetId.getValue());
            RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(
                    theServletRequest, ReadPartitionIdRequestDetails.forRead(targetId));
            ReplaceReferencesRequest replaceReferencesRequest =
                    new ReplaceReferencesRequest(sourceId, targetId, resourceLimit, partitionId);
            IBaseParameters retval =
                    getReplaceReferencesSvc().replaceReferences(replaceReferencesRequest, theServletRequest);
            if (ParametersUtil.getNamedParameter(getContext(), retval, OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK)
                    .isPresent()) {
                HttpServletResponse response = theServletRequest.getServletResponse();
                response.setStatus(HttpServletResponse.SC_ACCEPTED);
            }
            return retval;
        } finally {
            endRequest(theServletRequest);
        }
    }

    private static void validateReplaceReferencesParams(String theSourceId, String theTargetId) {
        if (isBlank(theSourceId)) {
            throw new InvalidRequestException(Msg.code(2583) + "Parameter '"
                    + OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID + "' is blank");
        }

        if (isBlank(theTargetId)) {
            throw new InvalidRequestException(Msg.code(2584) + "Parameter '"
                    + OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID + "' is blank");
        }
    }
}
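For context, a client-side sketch of invoking the provider above. The operation and parameter name constants are the ones referenced in this diff; the base URL and resource ids are placeholders, and the source and target must be of the same resource type.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

Parameters inParams = new Parameters();
inParams.addParameter()
        .setName(ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID)
        .setValue(new StringType("Patient/pat-src"));
inParams.addParameter()
        .setName(ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID)
        .setValue(new StringType("Patient/pat-tgt"));

// Server-level (global) operation: POST [base]/$<operation name>
Parameters outParams = client.operation()
        .onServer()
        .named(ProviderConstants.OPERATION_REPLACE_REFERENCES)
        .withParameters(inParams)
        .execute();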
@@ -19,222 +19,141 @@
 */
package ca.uhn.fhir.jpa.provider;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.util.StopLimitAccumulator;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Type;
import org.hl7.fhir.r4.model.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.InvalidParameterException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

import static ca.uhn.fhir.jpa.patch.FhirPatch.OPERATION_REPLACE;
import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_OPERATION;
import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_PATH;
import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_TYPE;
import static ca.uhn.fhir.jpa.patch.FhirPatch.PARAMETER_VALUE;
import static ca.uhn.fhir.rest.api.Constants.PARAM_ID;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.PARAM_SOURCE_REFERENCE_ID;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.PARAM_TARGET_REFERENCE_ID;
import static software.amazon.awssdk.utils.StringUtils.isBlank;
import static ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx.JOB_REPLACE_REFERENCES;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;

public class ReplaceReferencesSvcImpl implements IReplaceReferencesSvc {

    private final FhirContext myFhirContext;
    private static final Logger ourLog = LoggerFactory.getLogger(ReplaceReferencesSvcImpl.class);
    public static final String RESOURCE_TYPES_SYSTEM = "http://hl7.org/fhir/ValueSet/resource-types";
    private final DaoRegistry myDaoRegistry;
    private final HapiTransactionService myHapiTransactionService;
    private final IResourceLinkDao myResourceLinkDao;
    private final IJobCoordinator myJobCoordinator;
    private final ReplaceReferencesPatchBundleSvc myReplaceReferencesPatchBundleSvc;
    private final Batch2TaskHelper myBatch2TaskHelper;
    private final JpaStorageSettings myStorageSettings;

    public ReplaceReferencesSvcImpl(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
        myFhirContext = theFhirContext;
    public ReplaceReferencesSvcImpl(
            DaoRegistry theDaoRegistry,
            HapiTransactionService theHapiTransactionService,
            IResourceLinkDao theResourceLinkDao,
            IJobCoordinator theJobCoordinator,
            ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc,
            Batch2TaskHelper theBatch2TaskHelper,
            JpaStorageSettings theStorageSettings) {
        myDaoRegistry = theDaoRegistry;
        myHapiTransactionService = theHapiTransactionService;
        myResourceLinkDao = theResourceLinkDao;
        myJobCoordinator = theJobCoordinator;
        myReplaceReferencesPatchBundleSvc = theReplaceReferencesPatchBundleSvc;
        myBatch2TaskHelper = theBatch2TaskHelper;
        myStorageSettings = theStorageSettings;
    }

    @Override
    public IBaseParameters replaceReferences(String theSourceRefId, String theTargetRefId, RequestDetails theRequest) {
    public IBaseParameters replaceReferences(
            ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {
        theReplaceReferencesRequest.validateOrThrowInvalidParameterException();

        validateParameters(theSourceRefId, theTargetRefId);
        IIdType sourceRefId = new IdDt(theSourceRefId);
        IIdType targetRefId = new IdDt(theTargetRefId);

        // todo jm: this could be problematic depending on referenceing object set size, however we are adding
        // batch job option to handle that case as part of this feature
        List<? extends IBaseResource> referencingResources = findReferencingResourceIds(sourceRefId, theRequest);

        return replaceReferencesInTransaction(referencingResources, sourceRefId, targetRefId, theRequest);
    }

    private IBaseParameters replaceReferencesInTransaction(
            List<? extends IBaseResource> theReferencingResources,
            IIdType theCurrentTargetId,
            IIdType theNewTargetId,
            RequestDetails theRequest) {

        Parameters resultParams = new Parameters();
        // map resourceType -> map resourceId -> patch Parameters
        Map<String, Map<IIdType, Parameters>> parametersMap =
                buildPatchParameterMap(theReferencingResources, theCurrentTargetId, theNewTargetId);

        for (Map.Entry<String, Map<IIdType, Parameters>> mapEntry : parametersMap.entrySet()) {
            String resourceType = mapEntry.getKey();
            IFhirResourceDao<?> resDao = myDaoRegistry.getResourceDao(resourceType);
            if (resDao == null) {
                throw new InternalErrorException(
                        Msg.code(2588) + "No DAO registered for resource type: " + resourceType);
            }

            // patch each resource of resourceType
            patchResourceTypeResources(mapEntry, resDao, resultParams, theRequest);
        }

        return resultParams;
    }

    private void patchResourceTypeResources(
            Map.Entry<String, Map<IIdType, Parameters>> mapEntry,
            IFhirResourceDao<?> resDao,
            Parameters resultParams,
            RequestDetails theRequest) {

        for (Map.Entry<IIdType, Parameters> idParamMapEntry :
                mapEntry.getValue().entrySet()) {
            IIdType resourceId = idParamMapEntry.getKey();
            Parameters parameters = idParamMapEntry.getValue();

            MethodOutcome result =
                    resDao.patch(resourceId, null, PatchTypeEnum.FHIR_PATCH_JSON, null, parameters, theRequest);

            resultParams.addParameter().setResource((Resource) result.getOperationOutcome());
        if (theRequestDetails.isPreferAsync()) {
            return replaceReferencesPreferAsync(theReplaceReferencesRequest, theRequestDetails);
        } else {
            return replaceReferencesPreferSync(theReplaceReferencesRequest, theRequestDetails);
        }
    }

    private Map<String, Map<IIdType, Parameters>> buildPatchParameterMap(
            List<? extends IBaseResource> theReferencingResources,
            IIdType theCurrentReferencedResourceId,
            IIdType theNewReferencedResourceId) {
        Map<String, Map<IIdType, Parameters>> paramsMap = new HashMap<>();

        for (IBaseResource referencingResource : theReferencingResources) {
            // resource can have more than one reference to the same target resource
            for (ResourceReferenceInfo refInfo :
                    myFhirContext.newTerser().getAllResourceReferences(referencingResource)) {

                addReferenceToMapIfForSource(
                        theCurrentReferencedResourceId,
                        theNewReferencedResourceId,
                        referencingResource,
                        refInfo,
                        paramsMap);
            }
        }
        return paramsMap;
    @Override
    public Integer countResourcesReferencingResource(IIdType theResourceId, RequestDetails theRequestDetails) {
        return myHapiTransactionService
                .withRequest(theRequestDetails)
                .execute(() -> myResourceLinkDao.countResourcesTargetingFhirTypeAndFhirId(
                        theResourceId.getResourceType(), theResourceId.getIdPart()));
    }

    private void addReferenceToMapIfForSource(
            IIdType theCurrentReferencedResourceId,
            IIdType theNewReferencedResourceId,
            IBaseResource referencingResource,
            ResourceReferenceInfo refInfo,
            Map<String, Map<IIdType, Parameters>> paramsMap) {
        if (!refInfo.getResourceReference()
                .getReferenceElement()
                .toUnqualifiedVersionless()
                .getValueAsString()
                .equals(theCurrentReferencedResourceId
                        .toUnqualifiedVersionless()
                        .getValueAsString())) {
    private IBaseParameters replaceReferencesPreferAsync(
            ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {

            // not a reference to the resource being replaced
            return;
        }
        Task task = myBatch2TaskHelper.startJobAndCreateAssociatedTask(
                myDaoRegistry.getResourceDao(Task.class),
                theRequestDetails,
                myJobCoordinator,
                JOB_REPLACE_REFERENCES,
                new ReplaceReferencesJobParameters(
                        theReplaceReferencesRequest, myStorageSettings.getDefaultTransactionEntriesForWrite()));

        Parameters.ParametersParameterComponent paramComponent = createReplaceReferencePatchOperation(
                referencingResource.fhirType() + "." + refInfo.getName(),
                new Reference(
                        theNewReferencedResourceId.toUnqualifiedVersionless().getValueAsString()));

        paramsMap
                // preserve order, in case it could matter
                .computeIfAbsent(referencingResource.fhirType(), k -> new LinkedHashMap<>())
                .computeIfAbsent(referencingResource.getIdElement(), k -> new Parameters())
                .addParameter(paramComponent);
        Parameters retval = new Parameters();
        task.setIdElement(task.getIdElement().toUnqualifiedVersionless());
        task.getMeta().setVersionId(null);
        retval.addParameter()
                .setName(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK)
                .setResource(task);
        return retval;
    }

    /**
     * Try to perform the operation synchronously. However if there is more than a page of results, fall back to asynchronous operation
     */
    @Nonnull
    private Parameters.ParametersParameterComponent createReplaceReferencePatchOperation(
            String thePath, Type theValue) {
    private IBaseParameters replaceReferencesPreferSync(
            ReplaceReferencesRequest theReplaceReferencesRequest, RequestDetails theRequestDetails) {

        Parameters.ParametersParameterComponent operation = new Parameters.ParametersParameterComponent();
        operation.setName(PARAMETER_OPERATION);
        operation.addPart().setName(PARAMETER_TYPE).setValue(new CodeType(OPERATION_REPLACE));
        operation.addPart().setName(PARAMETER_PATH).setValue(new StringType(thePath));
        operation.addPart().setName(PARAMETER_VALUE).setValue(theValue);
        return operation;
        // TODO KHS get partition from request
        StopLimitAccumulator<IdDt> accumulator = myHapiTransactionService
                .withRequest(theRequestDetails)
                .execute(() -> getAllPidsWithLimit(theReplaceReferencesRequest));

        if (accumulator.isTruncated()) {
            throw new PreconditionFailedException(Msg.code(2597) + "Number of resources with references to "
                    + theReplaceReferencesRequest.sourceId
                    + " exceeds the resource-limit "
                    + theReplaceReferencesRequest.resourceLimit
                    + ". Submit the request asynchronsly by adding the HTTP Header 'Prefer: respond-async'.");
        }

        Bundle result = myReplaceReferencesPatchBundleSvc.patchReferencingResources(
                theReplaceReferencesRequest, accumulator.getItemList(), theRequestDetails);

        Parameters retval = new Parameters();
        retval.addParameter()
                .setName(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME)
                .setResource(result);
        return retval;
    }

    private List<? extends IBaseResource> findReferencingResourceIds(
            IIdType theSourceRefIdParam, RequestDetails theRequest) {
        IFhirResourceDao<?> dao = getDao(theSourceRefIdParam.getResourceType());
        if (dao == null) {
            throw new InternalErrorException(
                    Msg.code(2582) + "Couldn't obtain DAO for resource type" + theSourceRefIdParam.getResourceType());
        }
    private @Nonnull StopLimitAccumulator<IdDt> getAllPidsWithLimit(
            ReplaceReferencesRequest theReplaceReferencesRequest) {

        SearchParameterMap parameterMap = new SearchParameterMap();
        parameterMap.add(PARAM_ID, new StringParam(theSourceRefIdParam.getValue()));
        parameterMap.addRevInclude(new Include("*"));
        return dao.search(parameterMap, theRequest).getAllResources();
    }

    private IFhirResourceDao<?> getDao(String theResourceName) {
        return myDaoRegistry.getResourceDao(theResourceName);
    }

    private void validateParameters(String theSourceRefIdParam, String theTargetRefIdParam) {
        if (isBlank(theSourceRefIdParam)) {
            throw new InvalidParameterException(
                    Msg.code(2583) + "Parameter '" + PARAM_SOURCE_REFERENCE_ID + "' is blank");
        }

        if (isBlank(theTargetRefIdParam)) {
            throw new InvalidParameterException(
                    Msg.code(2584) + "Parameter '" + PARAM_TARGET_REFERENCE_ID + "' is blank");
        }

        IIdType sourceId = new IdDt(theSourceRefIdParam);
        if (isBlank(sourceId.getResourceType())) {
            throw new InvalidParameterException(
                    Msg.code(2585) + "'" + PARAM_SOURCE_REFERENCE_ID + "' must be a resource type qualified id");
        }

        IIdType targetId = new IdDt(theTargetRefIdParam);
        if (isBlank(targetId.getResourceType())) {
            throw new InvalidParameterException(
                    Msg.code(2586) + "'" + PARAM_TARGET_REFERENCE_ID + "' must be a resource type qualified id");
        }

        if (!targetId.getResourceType().equals(sourceId.getResourceType())) {
            throw new InvalidParameterException(
                    Msg.code(2587) + "Source and target id parameters must be for the same resource type");
        }
        Stream<IdDt> idStream = myResourceLinkDao.streamSourceIdsForTargetFhirId(
                theReplaceReferencesRequest.sourceId.getResourceType(),
                theReplaceReferencesRequest.sourceId.getIdPart());
        StopLimitAccumulator<IdDt> accumulator =
                StopLimitAccumulator.fromStreamAndLimit(idStream, theReplaceReferencesRequest.resourceLimit);
        return accumulator;
    }
}
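Which of the two paths above runs is driven by the standard HTTP Prefer header (see isPreferAsync in replaceReferences). A client that expects a large number of referencing resources can request the asynchronous flavour up front; a sketch with placeholder inputs:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import org.hl7.fhir.r4.model.Parameters;

FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder base URL

Parameters inParams = new Parameters(); // source/target parameters as in the earlier sketch

// Asking for asynchronous processing up front: instead of the patch outcome Bundle, the
// server starts the batch job and returns HTTP 202 Accepted plus the tracking Task resource.
Parameters outParams = client.operation()
        .onServer()
        .named(ProviderConstants.OPERATION_REPLACE_REFERENCES)
        .withParameters(inParams)
        .withAdditionalHeader("Prefer", "respond-async")
        .execute();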
@@ -0,0 +1,147 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.provider.merge;

import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.util.CanonicalIdentifier;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;

import java.util.List;

/**
 * See <a href="https://build.fhir.org/patient-operation-merge.html">Patient $merge spec</a>
 */
public abstract class BaseMergeOperationInputParameters {

    private List<CanonicalIdentifier> mySourceResourceIdentifiers;
    private List<CanonicalIdentifier> myTargetResourceIdentifiers;
    private IBaseReference mySourceResource;
    private IBaseReference myTargetResource;
    private boolean myPreview;
    private boolean myDeleteSource;
    private IBaseResource myResultResource;
    private final int myResourceLimit;

    protected BaseMergeOperationInputParameters(int theResourceLimit) {
        myResourceLimit = theResourceLimit;
    }

    public abstract String getSourceResourceParameterName();

    public abstract String getTargetResourceParameterName();

    public abstract String getSourceIdentifiersParameterName();

    public abstract String getTargetIdentifiersParameterName();

    public abstract String getResultResourceParameterName();

    public List<CanonicalIdentifier> getSourceIdentifiers() {
        return mySourceResourceIdentifiers;
    }

    public boolean hasAtLeastOneSourceIdentifier() {
        return mySourceResourceIdentifiers != null && !mySourceResourceIdentifiers.isEmpty();
    }

    public void setSourceResourceIdentifiers(List<CanonicalIdentifier> theSourceIdentifiers) {
        this.mySourceResourceIdentifiers = theSourceIdentifiers;
    }

    public List<CanonicalIdentifier> getTargetIdentifiers() {
        return myTargetResourceIdentifiers;
    }

    public boolean hasAtLeastOneTargetIdentifier() {
        return myTargetResourceIdentifiers != null && !myTargetResourceIdentifiers.isEmpty();
    }

    public void setTargetResourceIdentifiers(List<CanonicalIdentifier> theTargetIdentifiers) {
        this.myTargetResourceIdentifiers = theTargetIdentifiers;
    }

    public boolean getPreview() {
        return myPreview;
    }

    public void setPreview(boolean thePreview) {
        this.myPreview = thePreview;
    }

    public boolean getDeleteSource() {
        return myDeleteSource;
    }

    public void setDeleteSource(boolean theDeleteSource) {
        this.myDeleteSource = theDeleteSource;
    }

    public IBaseResource getResultResource() {
        return myResultResource;
    }

    public void setResultResource(IBaseResource theResultResource) {
        this.myResultResource = theResultResource;
    }

    public IBaseReference getSourceResource() {
        return mySourceResource;
    }

    public void setSourceResource(IBaseReference theSourceResource) {
        this.mySourceResource = theSourceResource;
    }

    public IBaseReference getTargetResource() {
        return myTargetResource;
    }

    public void setTargetResource(IBaseReference theTargetResource) {
        this.myTargetResource = theTargetResource;
    }

    public int getResourceLimit() {
        return myResourceLimit;
    }

    public MergeJobParameters asMergeJobParameters(
            FhirContext theFhirContext,
            JpaStorageSettings theStorageSettings,
            Patient theSourceResource,
            Patient theTargetResource,
            RequestPartitionId thePartitionId) {
        MergeJobParameters retval = new MergeJobParameters();
        if (getResultResource() != null) {
            retval.setResultResource(theFhirContext.newJsonParser().encodeResourceToString(getResultResource()));
        }
        retval.setDeleteSource(getDeleteSource());
        retval.setBatchSize(theStorageSettings.getDefaultTransactionEntriesForWrite());
        retval.setSourceId(new FhirIdJson(theSourceResource.getIdElement().toVersionless()));
        retval.setTargetId(new FhirIdJson(theTargetResource.getIdElement().toVersionless()));
        retval.setPartitionId(thePartitionId);
        return retval;
    }
}
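The asMergeJobParameters() method above is the bridge between this provider-level input object and the batch2 job that performs an asynchronous merge. The following is a minimal usage sketch only, not part of this changeset; fhirContext, storageSettings, sourcePatient and targetPatient are placeholders for objects the caller is assumed to already hold.

    PatientMergeOperationInputParameters input = new PatientMergeOperationInputParameters(1024);
    input.setDeleteSource(true);
    MergeJobParameters jobParameters = input.asMergeJobParameters(
            fhirContext,       // ca.uhn.fhir.context.FhirContext for R4
            storageSettings,   // JpaStorageSettings, supplies the default write batch size
            sourcePatient,     // resolved source Patient
            targetPatient,     // resolved target Patient
            RequestPartitionId.allPartitions());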
@@ -0,0 +1,65 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.provider.merge;

import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;

/**
 * See <a href="https://build.fhir.org/patient-operation-merge.html">Patient $merge spec</a>
 */
public class MergeOperationOutcome {
    private IBaseOperationOutcome myOperationOutcome;
    private int myHttpStatusCode;
    private IBaseResource myUpdatedTargetResource;
    private IBaseResource myTask;

    public IBaseOperationOutcome getOperationOutcome() {
        return myOperationOutcome;
    }

    public void setOperationOutcome(IBaseOperationOutcome theOperationOutcome) {
        this.myOperationOutcome = theOperationOutcome;
    }

    public int getHttpStatusCode() {
        return myHttpStatusCode;
    }

    public void setHttpStatusCode(int theHttpStatusCode) {
        this.myHttpStatusCode = theHttpStatusCode;
    }

    public IBaseResource getUpdatedTargetResource() {
        return myUpdatedTargetResource;
    }

    public void setUpdatedTargetResource(IBaseResource theUpdatedTargetResource) {
        this.myUpdatedTargetResource = theUpdatedTargetResource;
    }

    public IBaseResource getTask() {
        return myTask;
    }

    public void setTask(IBaseResource theTask) {
        this.myTask = theTask;
    }
}
@@ -0,0 +1,45 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.provider.merge;

import org.hl7.fhir.r4.model.Patient;

class MergeValidationResult {
    final Patient sourceResource;
    final Patient targetResource;
    final boolean isValid;
    final Integer httpStatusCode;

    private MergeValidationResult(
            boolean theIsValid, Integer theHttpStatusCode, Patient theSourceResource, Patient theTargetResource) {
        isValid = theIsValid;
        httpStatusCode = theHttpStatusCode;
        sourceResource = theSourceResource;
        targetResource = theTargetResource;
    }

    public static MergeValidationResult invalidResult(int theHttpStatusCode) {
        return new MergeValidationResult(false, theHttpStatusCode, null, null);
    }

    public static MergeValidationResult validResult(Patient theSourceResource, Patient theTargetResource) {
        return new MergeValidationResult(true, null, theSourceResource, theTargetResource);
    }
}
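MergeValidationResult has no public constructor, so validation code is expected to go through the two factory methods above. A hedged sketch of that pattern follows; the condition and the Patient variables are illustrative placeholders, not code from this changeset.

    MergeValidationResult result = inputsLookValid
            ? MergeValidationResult.validResult(sourcePatient, targetPatient)
            : MergeValidationResult.invalidResult(STATUS_HTTP_400_BAD_REQUEST);
    if (!result.isValid) {
        // surface result.httpStatusCode to the caller instead of proceeding with the merge
    }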
@ -0,0 +1,462 @@
|
|||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.jpa.provider.merge;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.param.TokenAndListParam;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
import ca.uhn.fhir.util.CanonicalIdentifier;
|
||||
import ca.uhn.fhir.util.OperationOutcomeUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
|
||||
import org.hl7.fhir.instance.model.api.IBaseReference;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.IdType;
|
||||
import org.hl7.fhir.r4.model.Identifier;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_400_BAD_REQUEST;
|
||||
import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_422_UNPROCESSABLE_ENTITY;
|
||||
|
||||
/**
|
||||
* Supporting class that validates input parameters to {@link ResourceMergeService}.
|
||||
*/
|
||||
class MergeValidationService {
|
||||
private final FhirContext myFhirContext;
|
||||
private final IFhirResourceDao<Patient> myPatientDao;
|
||||
|
||||
public MergeValidationService(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
|
||||
myFhirContext = theFhirContext;
|
||||
myPatientDao = theDaoRegistry.getResourceDao(Patient.class);
|
||||
}
|
||||
|
||||
MergeValidationResult validate(
|
||||
BaseMergeOperationInputParameters theMergeOperationParameters,
|
||||
RequestDetails theRequestDetails,
|
||||
MergeOperationOutcome theMergeOutcome) {
|
||||
|
||||
IBaseOperationOutcome operationOutcome = theMergeOutcome.getOperationOutcome();
|
||||
|
||||
if (!validateMergeOperationParameters(theMergeOperationParameters, operationOutcome)) {
|
||||
return MergeValidationResult.invalidResult(STATUS_HTTP_400_BAD_REQUEST);
|
||||
}
|
||||
|
||||
// cast to Patient, since we only support merging Patient resources for now
|
||||
Patient sourceResource =
|
||||
(Patient) resolveSourceResource(theMergeOperationParameters, theRequestDetails, operationOutcome);
|
||||
|
||||
if (sourceResource == null) {
|
||||
return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
|
||||
}
|
||||
|
||||
// cast to Patient, since we only support merging Patient resources for now
|
||||
Patient targetResource =
|
||||
(Patient) resolveTargetResource(theMergeOperationParameters, theRequestDetails, operationOutcome);
|
||||
|
||||
if (targetResource == null) {
|
||||
return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
|
||||
}
|
||||
|
||||
if (!validateSourceAndTargetAreSuitableForMerge(sourceResource, targetResource, operationOutcome)) {
|
||||
return MergeValidationResult.invalidResult(STATUS_HTTP_422_UNPROCESSABLE_ENTITY);
|
||||
}
|
||||
|
||||
if (!validateResultResourceIfExists(
|
||||
theMergeOperationParameters, targetResource, sourceResource, operationOutcome)) {
|
||||
return MergeValidationResult.invalidResult(STATUS_HTTP_400_BAD_REQUEST);
|
||||
}
|
||||
return MergeValidationResult.validResult(sourceResource, targetResource);
|
||||
}
|
||||
|
||||
private boolean validateResultResourceIfExists(
|
||||
BaseMergeOperationInputParameters theMergeOperationParameters,
|
||||
Patient theResolvedTargetResource,
|
||||
Patient theResolvedSourceResource,
|
||||
IBaseOperationOutcome theOperationOutcome) {
|
||||
|
||||
if (theMergeOperationParameters.getResultResource() == null) {
|
||||
// result resource is not provided, no further validation is needed
|
||||
return true;
|
||||
}
|
||||
|
||||
boolean retval = true;
|
||||
|
||||
Patient theResultResource = (Patient) theMergeOperationParameters.getResultResource();
|
||||
|
||||
// validate the result resource's id as same as the target resource
|
||||
if (!theResolvedTargetResource.getIdElement().toVersionless().equals(theResultResource.getIdElement())) {
|
||||
String msg = String.format(
|
||||
"'%s' must have the same versionless id as the actual resolved target resource '%s'. "
|
||||
+ "The actual resolved target resource's id is: '%s'",
|
||||
theMergeOperationParameters.getResultResourceParameterName(),
|
||||
theResultResource.getIdElement(),
|
||||
theResolvedTargetResource.getIdElement().toVersionless().getValue());
|
||||
addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
|
||||
retval = false;
|
||||
}
|
||||
|
||||
// validate the result resource contains the identifiers provided in the target identifiers param
|
||||
if (theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
|
||||
&& !hasAllIdentifiers(theResultResource, theMergeOperationParameters.getTargetIdentifiers())) {
|
||||
String msg = String.format(
|
||||
"'%s' must have all the identifiers provided in %s",
|
||||
theMergeOperationParameters.getResultResourceParameterName(),
|
||||
theMergeOperationParameters.getTargetIdentifiersParameterName());
|
||||
addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
|
||||
retval = false;
|
||||
}
|
||||
|
||||
// if the source resource is not being deleted, the result resource must have a replaces link to the source
|
||||
// resource
|
||||
// if the source resource is being deleted, the result resource must not have a replaces link to the source
|
||||
// resource
|
||||
if (!validateResultResourceReplacesLinkToSourceResource(
|
||||
theResultResource,
|
||||
theResolvedSourceResource,
|
||||
theMergeOperationParameters.getResultResourceParameterName(),
|
||||
theMergeOperationParameters.getDeleteSource(),
|
||||
theOperationOutcome)) {
|
||||
retval = false;
|
||||
}
|
||||
|
||||
return retval;
|
||||
}
|
||||
|
||||
private void addErrorToOperationOutcome(IBaseOperationOutcome theOutcome, String theDiagnosticMsg, String theCode) {
|
||||
OperationOutcomeUtil.addIssue(myFhirContext, theOutcome, "error", theDiagnosticMsg, null, theCode);
|
||||
}
|
||||
|
||||
private boolean hasAllIdentifiers(Patient theResource, List<CanonicalIdentifier> theIdentifiers) {
|
||||
|
||||
List<Identifier> identifiersInResource = theResource.getIdentifier();
|
||||
for (CanonicalIdentifier identifier : theIdentifiers) {
|
||||
boolean identifierFound = identifiersInResource.stream()
|
||||
.anyMatch(i -> i.getSystem()
|
||||
.equals(identifier.getSystemElement().getValueAsString())
|
||||
&& i.getValue().equals(identifier.getValueElement().getValueAsString()));
|
||||
|
||||
if (!identifierFound) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean validateResultResourceReplacesLinkToSourceResource(
|
||||
Patient theResultResource,
|
||||
Patient theResolvedSourceResource,
|
||||
String theResultResourceParameterName,
|
||||
boolean theDeleteSource,
|
||||
IBaseOperationOutcome theOperationOutcome) {
|
||||
// the result resource must have the replaces link set to the source resource
|
||||
List<Reference> replacesLinkToSourceResource = getLinksToResource(
|
||||
theResultResource, Patient.LinkType.REPLACES, theResolvedSourceResource.getIdElement());
|
||||
|
||||
if (theDeleteSource) {
|
||||
if (!replacesLinkToSourceResource.isEmpty()) {
|
||||
String msg = String.format(
|
||||
"'%s' must not have a 'replaces' link to the source resource "
|
||||
+ "when the source resource will be deleted, as the link may prevent deleting the source "
|
||||
+ "resource.",
|
||||
theResultResourceParameterName);
|
||||
addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
if (replacesLinkToSourceResource.isEmpty()) {
|
||||
String msg = String.format(
|
||||
"'%s' must have a 'replaces' link to the source resource.", theResultResourceParameterName);
|
||||
addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (replacesLinkToSourceResource.size() > 1) {
|
||||
String msg = String.format(
|
||||
"'%s' has multiple 'replaces' links to the source resource. There should be only one.",
|
||||
theResultResourceParameterName);
|
||||
addErrorToOperationOutcome(theOperationOutcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private List<Reference> getLinksToResource(
|
||||
Patient theResource, Patient.LinkType theLinkType, IIdType theResourceId) {
|
||||
List<Reference> links = getLinksOfTypeWithNonNullReference(theResource, theLinkType);
|
||||
return links.stream()
|
||||
.filter(r -> theResourceId.toVersionless().getValue().equals(r.getReference()))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private List<Reference> getLinksOfTypeWithNonNullReference(Patient theResource, Patient.LinkType theLinkType) {
|
||||
List<Reference> links = new ArrayList<>();
|
||||
if (theResource.hasLink()) {
|
||||
for (Patient.PatientLinkComponent link : theResource.getLink()) {
|
||||
if (theLinkType.equals(link.getType()) && link.hasOther()) {
|
||||
links.add(link.getOther());
|
||||
}
|
||||
}
|
||||
}
|
||||
return links;
|
||||
}
|
||||
|
||||
private boolean validateSourceAndTargetAreSuitableForMerge(
|
||||
Patient theSourceResource, Patient theTargetResource, IBaseOperationOutcome outcome) {
|
||||
|
||||
if (theSourceResource.getId().equalsIgnoreCase(theTargetResource.getId())) {
|
||||
String msg = "Source and target resources are the same resource.";
|
||||
// What is the right code to use in these cases?
|
||||
addErrorToOperationOutcome(outcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (theTargetResource.hasActive() && !theTargetResource.getActive()) {
|
||||
String msg = "Target resource is not active, it must be active to be the target of a merge operation.";
|
||||
addErrorToOperationOutcome(outcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
|
||||
List<Reference> replacedByLinksInTarget =
|
||||
getLinksOfTypeWithNonNullReference(theTargetResource, Patient.LinkType.REPLACEDBY);
|
||||
if (!replacedByLinksInTarget.isEmpty()) {
|
||||
String ref = replacedByLinksInTarget.get(0).getReference();
|
||||
String msg = String.format(
|
||||
"Target resource was previously replaced by a resource with reference '%s', it "
|
||||
+ "is not a suitable target for merging.",
|
||||
ref);
|
||||
addErrorToOperationOutcome(outcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
|
||||
List<Reference> replacedByLinksInSource =
|
||||
getLinksOfTypeWithNonNullReference(theSourceResource, Patient.LinkType.REPLACEDBY);
|
||||
if (!replacedByLinksInSource.isEmpty()) {
|
||||
String ref = replacedByLinksInSource.get(0).getReference();
|
||||
String msg = String.format(
|
||||
"Source resource was previously replaced by a resource with reference '%s', it "
|
||||
+ "is not a suitable source for merging.",
|
||||
ref);
|
||||
addErrorToOperationOutcome(outcome, msg, "invalid");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the merge operation parameters and adds validation errors to the outcome
|
||||
*
|
||||
* @param theMergeOperationParameters the merge operation parameters
|
||||
* @param theOutcome the outcome to add validation errors to
|
||||
* @return true if the parameters are valid, false otherwise
|
||||
*/
|
||||
private boolean validateMergeOperationParameters(
|
||||
BaseMergeOperationInputParameters theMergeOperationParameters, IBaseOperationOutcome theOutcome) {
|
||||
List<String> errorMessages = new ArrayList<>();
|
||||
if (!theMergeOperationParameters.hasAtLeastOneSourceIdentifier()
|
||||
&& theMergeOperationParameters.getSourceResource() == null) {
|
||||
String msg = String.format(
|
||||
"There are no source resource parameters provided, include either a '%s', or a '%s' parameter.",
|
||||
theMergeOperationParameters.getSourceResourceParameterName(),
|
||||
theMergeOperationParameters.getSourceIdentifiersParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
// Spec has conflicting information about this case
|
||||
if (theMergeOperationParameters.hasAtLeastOneSourceIdentifier()
|
||||
&& theMergeOperationParameters.getSourceResource() != null) {
|
||||
String msg = String.format(
|
||||
"Source resource must be provided either by '%s' or by '%s', not both.",
|
||||
theMergeOperationParameters.getSourceResourceParameterName(),
|
||||
theMergeOperationParameters.getSourceIdentifiersParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
if (!theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
|
||||
&& theMergeOperationParameters.getTargetResource() == null) {
|
||||
String msg = String.format(
|
||||
"There are no target resource parameters provided, include either a '%s', or a '%s' parameter.",
|
||||
theMergeOperationParameters.getTargetResourceParameterName(),
|
||||
theMergeOperationParameters.getTargetIdentifiersParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
// Spec has conflicting information about this case
|
||||
if (theMergeOperationParameters.hasAtLeastOneTargetIdentifier()
|
||||
&& theMergeOperationParameters.getTargetResource() != null) {
|
||||
String msg = String.format(
|
||||
"Target resource must be provided either by '%s' or by '%s', not both.",
|
||||
theMergeOperationParameters.getTargetResourceParameterName(),
|
||||
theMergeOperationParameters.getTargetIdentifiersParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
Reference sourceRef = (Reference) theMergeOperationParameters.getSourceResource();
|
||||
if (sourceRef != null && !sourceRef.hasReference()) {
|
||||
String msg = String.format(
|
||||
"Reference specified in '%s' parameter does not have a reference element.",
|
||||
theMergeOperationParameters.getSourceResourceParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
Reference targetRef = (Reference) theMergeOperationParameters.getTargetResource();
|
||||
if (targetRef != null && !targetRef.hasReference()) {
|
||||
String msg = String.format(
|
||||
"Reference specified in '%s' parameter does not have a reference element.",
|
||||
theMergeOperationParameters.getTargetResourceParameterName());
|
||||
errorMessages.add(msg);
|
||||
}
|
||||
|
||||
if (!errorMessages.isEmpty()) {
|
||||
for (String validationError : errorMessages) {
|
||||
addErrorToOperationOutcome(theOutcome, validationError, "required");
|
||||
}
|
||||
// there are validation errors
|
||||
return false;
|
||||
}
|
||||
|
||||
// no validation errors
|
||||
return true;
|
||||
}
|
||||
|
||||
private IBaseResource resolveSourceResource(
|
||||
BaseMergeOperationInputParameters theOperationParameters,
|
||||
RequestDetails theRequestDetails,
|
||||
IBaseOperationOutcome theOutcome) {
|
||||
return resolveResource(
|
||||
theOperationParameters.getSourceResource(),
|
||||
theOperationParameters.getSourceIdentifiers(),
|
||||
theRequestDetails,
|
||||
theOutcome,
|
||||
theOperationParameters.getSourceResourceParameterName(),
|
||||
theOperationParameters.getSourceIdentifiersParameterName());
|
||||
}
|
||||
|
||||
private IBaseResource resolveTargetResource(
|
||||
BaseMergeOperationInputParameters theOperationParameters,
|
||||
RequestDetails theRequestDetails,
|
||||
IBaseOperationOutcome theOutcome) {
|
||||
return resolveResource(
|
||||
theOperationParameters.getTargetResource(),
|
||||
theOperationParameters.getTargetIdentifiers(),
|
||||
theRequestDetails,
|
||||
theOutcome,
|
||||
theOperationParameters.getTargetResourceParameterName(),
|
||||
theOperationParameters.getTargetIdentifiersParameterName());
|
||||
}
|
||||
|
||||
private IBaseResource resolveResource(
|
||||
IBaseReference theReference,
|
||||
List<CanonicalIdentifier> theIdentifiers,
|
||||
RequestDetails theRequestDetails,
|
||||
IBaseOperationOutcome theOutcome,
|
||||
String theOperationReferenceParameterName,
|
||||
String theOperationIdentifiersParameterName) {
|
||||
if (theReference != null) {
|
||||
return resolveResourceByReference(
|
||||
theReference, theRequestDetails, theOutcome, theOperationReferenceParameterName);
|
||||
}
|
||||
|
||||
return resolveResourceByIdentifiers(
|
||||
theIdentifiers, theRequestDetails, theOutcome, theOperationIdentifiersParameterName);
|
||||
}
|
||||
|
||||
private IBaseResource resolveResourceByIdentifiers(
|
||||
List<CanonicalIdentifier> theIdentifiers,
|
||||
RequestDetails theRequestDetails,
|
||||
IBaseOperationOutcome theOutcome,
|
||||
String theOperationParameterName) {
|
||||
|
||||
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||
TokenAndListParam tokenAndListParam = new TokenAndListParam();
|
||||
for (CanonicalIdentifier identifier : theIdentifiers) {
|
||||
TokenParam tokenParam = new TokenParam(
|
||||
identifier.getSystemElement().getValueAsString(),
|
||||
identifier.getValueElement().getValueAsString());
|
||||
tokenAndListParam.addAnd(tokenParam);
|
||||
}
|
||||
searchParameterMap.add("identifier", tokenAndListParam);
|
||||
searchParameterMap.setCount(2);
|
||||
|
||||
IBundleProvider bundle = myPatientDao.search(searchParameterMap, theRequestDetails);
|
||||
List<IBaseResource> resources = bundle.getAllResources();
|
||||
if (resources.isEmpty()) {
|
||||
String msg = String.format(
|
||||
"No resources found matching the identifier(s) specified in '%s'", theOperationParameterName);
|
||||
addErrorToOperationOutcome(theOutcome, msg, "not-found");
|
||||
return null;
|
||||
}
|
||||
if (resources.size() > 1) {
|
||||
String msg = String.format(
|
||||
"Multiple resources found matching the identifier(s) specified in '%s'", theOperationParameterName);
|
||||
addErrorToOperationOutcome(theOutcome, msg, "multiple-matches");
|
||||
return null;
|
||||
}
|
||||
|
||||
return resources.get(0);
|
||||
}
|
||||
|
||||
private IBaseResource resolveResourceByReference(
|
||||
IBaseReference theReference,
|
||||
RequestDetails theRequestDetails,
|
||||
IBaseOperationOutcome theOutcome,
|
||||
String theOperationParameterName) {
|
||||
// TODO Emre: why does IBaseReference not have getIdentifier or hasReference methods?
|
||||
// casting it to r4.Reference for now
|
||||
Reference r4ref = (Reference) theReference;
|
||||
|
||||
IIdType theResourceId = new IdType(r4ref.getReferenceElement().getValue());
|
||||
IBaseResource resource;
|
||||
try {
|
||||
resource = myPatientDao.read(theResourceId.toVersionless(), theRequestDetails);
|
||||
} catch (ResourceNotFoundException e) {
|
||||
String msg = String.format(
|
||||
"Resource not found for the reference specified in '%s' parameter", theOperationParameterName);
|
||||
addErrorToOperationOutcome(theOutcome, msg, "not-found");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (theResourceId.hasVersionIdPart()
|
||||
&& !theResourceId
|
||||
.getVersionIdPart()
|
||||
.equals(resource.getIdElement().getVersionIdPart())) {
|
||||
String msg = String.format(
|
||||
"The reference in '%s' parameter has a version specified, "
|
||||
+ "but it is not the latest version of the resource",
|
||||
theOperationParameterName);
|
||||
addErrorToOperationOutcome(theOutcome, msg, "conflict");
|
||||
return null;
|
||||
}
|
||||
|
||||
return resource;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,60 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.provider.merge;

import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER;

/**
 * See <a href="https://build.fhir.org/patient-operation-merge.html">Patient $merge spec</a>
 */
public class PatientMergeOperationInputParameters extends BaseMergeOperationInputParameters {
    public PatientMergeOperationInputParameters(int theResourceLimit) {
        super(theResourceLimit);
    }

    @Override
    public String getSourceResourceParameterName() {
        return OPERATION_MERGE_PARAM_SOURCE_PATIENT;
    }

    @Override
    public String getTargetResourceParameterName() {
        return OPERATION_MERGE_PARAM_TARGET_PATIENT;
    }

    @Override
    public String getSourceIdentifiersParameterName() {
        return OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER;
    }

    @Override
    public String getTargetIdentifiersParameterName() {
        return OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER;
    }

    @Override
    public String getResultResourceParameterName() {
        return OPERATION_MERGE_PARAM_RESULT_PATIENT;
    }
}
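These parameter names map onto the inputs defined by the Patient $merge specification. As a hedged illustration only (the ProviderConstants values are not shown in this diff, so the literal kebab-case names below are an assumption based on the spec), a client-side Parameters payload might be built like this:

    Parameters inParams = new Parameters();
    inParams.addParameter().setName("source-patient").setValue(new Reference("Patient/pat-src"));
    inParams.addParameter().setName("target-patient").setValue(new Reference("Patient/pat-tgt"));
    inParams.addParameter().setName("preview").setValue(new BooleanType(true));
    inParams.addParameter().setName("delete-source").setValue(new BooleanType(false));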
@@ -0,0 +1,165 @@
package ca.uhn.fhir.jpa.provider.merge;

import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.provider.BaseJpaResourceProvider;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.CanonicalIdentifier;
import ca.uhn.fhir.util.ParametersUtil;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Patient;

import java.util.List;
import java.util.stream.Collectors;

import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_RESULT;

public class PatientMergeProvider extends BaseJpaResourceProvider<Patient> {

    private final FhirContext myFhirContext;
    private final ResourceMergeService myResourceMergeService;

    public PatientMergeProvider(
            FhirContext theFhirContext, DaoRegistry theDaoRegistry, ResourceMergeService theResourceMergeService) {
        super(theDaoRegistry.getResourceDao("Patient"));
        myFhirContext = theFhirContext;
        assert myFhirContext.getVersion().getVersion() == FhirVersionEnum.R4;
        myResourceMergeService = theResourceMergeService;
    }

    @Override
    public Class<Patient> getResourceType() {
        return Patient.class;
    }

    /**
     * /Patient/$merge
     */
    @Operation(
            name = ProviderConstants.OPERATION_MERGE,
            canonicalUrl = "http://hl7.org/fhir/OperationDefinition/Patient-merge")
    public IBaseParameters patientMerge(
            HttpServletRequest theServletRequest,
            HttpServletResponse theServletResponse,
            ServletRequestDetails theRequestDetails,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER)
                    List<Identifier> theSourcePatientIdentifier,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER)
                    List<Identifier> theTargetPatientIdentifier,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_SOURCE_PATIENT, max = 1)
                    IBaseReference theSourcePatient,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_TARGET_PATIENT, max = 1)
                    IBaseReference theTargetPatient,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_PREVIEW, typeName = "boolean", max = 1)
                    IPrimitiveType<Boolean> thePreview,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_DELETE_SOURCE, typeName = "boolean", max = 1)
                    IPrimitiveType<Boolean> theDeleteSource,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT, max = 1)
                    IBaseResource theResultPatient,
            @OperationParam(name = ProviderConstants.OPERATION_MERGE_PARAM_BATCH_SIZE, typeName = "unsignedInt")
                    IPrimitiveType<Integer> theResourceLimit) {

        startRequest(theServletRequest);

        try {
            int resourceLimit = MergeResourceHelper.setResourceLimitFromParameter(myStorageSettings, theResourceLimit);

            BaseMergeOperationInputParameters mergeOperationParameters = buildMergeOperationInputParameters(
                    theSourcePatientIdentifier,
                    theTargetPatientIdentifier,
                    theSourcePatient,
                    theTargetPatient,
                    thePreview,
                    theDeleteSource,
                    theResultPatient,
                    resourceLimit);

            MergeOperationOutcome mergeOutcome =
                    myResourceMergeService.merge(mergeOperationParameters, theRequestDetails);

            theServletResponse.setStatus(mergeOutcome.getHttpStatusCode());
            return buildMergeOperationOutputParameters(myFhirContext, mergeOutcome, theRequestDetails.getResource());
        } finally {
            endRequest(theServletRequest);
        }
    }

    private IBaseParameters buildMergeOperationOutputParameters(
            FhirContext theFhirContext, MergeOperationOutcome theMergeOutcome, IBaseResource theInputParameters) {

        IBaseParameters retVal = ParametersUtil.newInstance(theFhirContext);
        ParametersUtil.addParameterToParameters(
                theFhirContext, retVal, ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_INPUT, theInputParameters);

        ParametersUtil.addParameterToParameters(
                theFhirContext,
                retVal,
                ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_OUTCOME,
                theMergeOutcome.getOperationOutcome());

        if (theMergeOutcome.getUpdatedTargetResource() != null) {
            ParametersUtil.addParameterToParameters(
                    theFhirContext,
                    retVal,
                    OPERATION_MERGE_OUTPUT_PARAM_RESULT,
                    theMergeOutcome.getUpdatedTargetResource());
        }

        if (theMergeOutcome.getTask() != null) {
            ParametersUtil.addParameterToParameters(
                    theFhirContext,
                    retVal,
                    ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_TASK,
                    theMergeOutcome.getTask());
        }
        return retVal;
    }

    private BaseMergeOperationInputParameters buildMergeOperationInputParameters(
            List<Identifier> theSourcePatientIdentifier,
            List<Identifier> theTargetPatientIdentifier,
            IBaseReference theSourcePatient,
            IBaseReference theTargetPatient,
            IPrimitiveType<Boolean> thePreview,
            IPrimitiveType<Boolean> theDeleteSource,
            IBaseResource theResultPatient,
            int theResourceLimit) {
        BaseMergeOperationInputParameters mergeOperationParameters =
                new PatientMergeOperationInputParameters(theResourceLimit);
        if (theSourcePatientIdentifier != null) {
            List<CanonicalIdentifier> sourceResourceIdentifiers = theSourcePatientIdentifier.stream()
                    .map(CanonicalIdentifier::fromIdentifier)
                    .collect(Collectors.toList());
            mergeOperationParameters.setSourceResourceIdentifiers(sourceResourceIdentifiers);
        }
        if (theTargetPatientIdentifier != null) {
            List<CanonicalIdentifier> targetResourceIdentifiers = theTargetPatientIdentifier.stream()
                    .map(CanonicalIdentifier::fromIdentifier)
                    .collect(Collectors.toList());
            mergeOperationParameters.setTargetResourceIdentifiers(targetResourceIdentifiers);
        }
        mergeOperationParameters.setSourceResource(theSourcePatient);
        mergeOperationParameters.setTargetResource(theTargetPatient);
        mergeOperationParameters.setPreview(thePreview != null && thePreview.getValue());
        mergeOperationParameters.setDeleteSource(theDeleteSource != null && theDeleteSource.getValue());

        if (theResultPatient != null) {
            // pass in a copy of the result patient as we don't want it to be modified. It will be
            // returned back to the client as part of the response.
            mergeOperationParameters.setResultResource(((Patient) theResultPatient).copy());
        }

        return mergeOperationParameters;
    }
}
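The provider exposes the operation at Patient/$merge, so it can be exercised with the standard HAPI FHIR generic client. A small sketch follows (not part of this changeset; the base URL is a placeholder and inParams is a Parameters resource such as the one sketched earlier):

    FhirContext ctx = FhirContext.forR4();
    IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");
    Parameters outParams = client.operation()
            .onType(Patient.class)
            .named("$merge")
            .withParameters(inParams)
            .execute();
    // outParams carries the echoed input, an OperationOutcome, and, for a synchronous
    // merge, the updated target Patient in the result output parameter.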
@@ -0,0 +1,265 @@
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.provider.merge;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
import ca.uhn.fhir.batch2.jobs.merge.MergeResourceHelper;
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.provider.IReplaceReferencesSvc;
import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx.JOB_MERGE;
import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_200_OK;
import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_202_ACCEPTED;
import static ca.uhn.fhir.rest.api.Constants.STATUS_HTTP_500_INTERNAL_ERROR;

/**
 * Service for the FHIR $merge operation. Currently only supports Patient/$merge. The plan is to expand to other resource types.
 */
public class ResourceMergeService {
    private static final Logger ourLog = LoggerFactory.getLogger(ResourceMergeService.class);

    private final FhirContext myFhirContext;
    private final JpaStorageSettings myStorageSettings;
    private final IFhirResourceDao<Patient> myPatientDao;
    private final IReplaceReferencesSvc myReplaceReferencesSvc;
    private final IHapiTransactionService myHapiTransactionService;
    private final IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
    private final IFhirResourceDao<Task> myTaskDao;
    private final IJobCoordinator myJobCoordinator;
    private final MergeResourceHelper myMergeResourceHelper;
    private final Batch2TaskHelper myBatch2TaskHelper;
    private final MergeValidationService myMergeValidationService;

    public ResourceMergeService(
            JpaStorageSettings theStorageSettings,
            DaoRegistry theDaoRegistry,
            IReplaceReferencesSvc theReplaceReferencesSvc,
            IHapiTransactionService theHapiTransactionService,
            IRequestPartitionHelperSvc theRequestPartitionHelperSvc,
            IJobCoordinator theJobCoordinator,
            Batch2TaskHelper theBatch2TaskHelper) {
        myStorageSettings = theStorageSettings;

        myPatientDao = theDaoRegistry.getResourceDao(Patient.class);
        myTaskDao = theDaoRegistry.getResourceDao(Task.class);
        myReplaceReferencesSvc = theReplaceReferencesSvc;
        myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
        myJobCoordinator = theJobCoordinator;
        myBatch2TaskHelper = theBatch2TaskHelper;
        myFhirContext = myPatientDao.getContext();
        myHapiTransactionService = theHapiTransactionService;
        myMergeResourceHelper = new MergeResourceHelper(myPatientDao);
        myMergeValidationService = new MergeValidationService(myFhirContext, theDaoRegistry);
    }

    /**
     * Perform the $merge operation. If the number of resources to be changed exceeds the provided batch size,
     * then switch to async mode. See the <a href="https://build.fhir.org/patient-operation-merge.html">Patient $merge spec</a>
     * for details on what the difference is between synchronous and asynchronous mode.
     *
     * @param theMergeOperationParameters the merge operation parameters
     * @param theRequestDetails the request details
     * @return the merge outcome containing OperationOutcome and HTTP status code
     */
    public MergeOperationOutcome merge(
            BaseMergeOperationInputParameters theMergeOperationParameters, RequestDetails theRequestDetails) {

        MergeOperationOutcome mergeOutcome = new MergeOperationOutcome();
        IBaseOperationOutcome operationOutcome = OperationOutcomeUtil.newInstance(myFhirContext);
        mergeOutcome.setOperationOutcome(operationOutcome);
        // default to 200 OK, would be changed to another code during processing as required
        mergeOutcome.setHttpStatusCode(STATUS_HTTP_200_OK);
        try {
            validateAndMerge(theMergeOperationParameters, theRequestDetails, mergeOutcome);
        } catch (Exception e) {
            ourLog.error("Resource merge failed", e);
            if (e instanceof BaseServerResponseException) {
                mergeOutcome.setHttpStatusCode(((BaseServerResponseException) e).getStatusCode());
            } else {
                mergeOutcome.setHttpStatusCode(STATUS_HTTP_500_INTERNAL_ERROR);
            }
            OperationOutcomeUtil.addIssue(myFhirContext, operationOutcome, "error", e.getMessage(), null, "exception");
        }
        return mergeOutcome;
    }

    private void validateAndMerge(
            BaseMergeOperationInputParameters theMergeOperationParameters,
            RequestDetails theRequestDetails,
            MergeOperationOutcome theMergeOutcome) {

        // TODO KHS remove the outparameter and instead accumulate issues in the validation result
        MergeValidationResult mergeValidationResult =
                myMergeValidationService.validate(theMergeOperationParameters, theRequestDetails, theMergeOutcome);

        if (mergeValidationResult.isValid) {
            Patient sourceResource = mergeValidationResult.sourceResource;
            Patient targetResource = mergeValidationResult.targetResource;

            if (theMergeOperationParameters.getPreview()) {
                handlePreview(
                        sourceResource,
                        targetResource,
                        theMergeOperationParameters,
                        theRequestDetails,
                        theMergeOutcome);
            } else {
                doMerge(
                        theMergeOperationParameters,
                        sourceResource,
                        targetResource,
                        theRequestDetails,
                        theMergeOutcome);
            }
        } else {
            theMergeOutcome.setHttpStatusCode(mergeValidationResult.httpStatusCode);
        }
    }

    private void handlePreview(
            Patient theSourceResource,
            Patient theTargetResource,
            BaseMergeOperationInputParameters theMergeOperationParameters,
            RequestDetails theRequestDetails,
            MergeOperationOutcome theMergeOutcome) {

        Integer referencingResourceCount = myReplaceReferencesSvc.countResourcesReferencingResource(
                theSourceResource.getIdElement().toVersionless(), theRequestDetails);

        // in preview mode, we should also return what the target would look like
        Patient theResultResource = (Patient) theMergeOperationParameters.getResultResource();
        Patient targetPatientAsIfUpdated = myMergeResourceHelper.prepareTargetPatientForUpdate(
                theTargetResource, theSourceResource, theResultResource, theMergeOperationParameters.getDeleteSource());
        theMergeOutcome.setUpdatedTargetResource(targetPatientAsIfUpdated);

        // adding +2 because the source and the target resources would be updated as well
        String diagnosticsMsg = String.format("Merge would update %d resources", referencingResourceCount + 2);
        String detailsText = "Preview only merge operation - no issues detected";
        addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), diagnosticsMsg, detailsText);
    }

    private void doMerge(
            BaseMergeOperationInputParameters theMergeOperationParameters,
            Patient theSourceResource,
            Patient theTargetResource,
            RequestDetails theRequestDetails,
            MergeOperationOutcome theMergeOutcome) {

        RequestPartitionId partitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(
                theRequestDetails, ReadPartitionIdRequestDetails.forRead(theTargetResource.getIdElement()));

        if (theRequestDetails.isPreferAsync()) {
            doMergeAsync(
                    theMergeOperationParameters,
                    theSourceResource,
                    theTargetResource,
                    theRequestDetails,
                    theMergeOutcome,
                    partitionId);
        } else {
            doMergeSync(
                    theMergeOperationParameters,
                    theSourceResource,
                    theTargetResource,
                    theRequestDetails,
                    theMergeOutcome,
                    partitionId);
        }
    }

    private void doMergeSync(
            BaseMergeOperationInputParameters theMergeOperationParameters,
            Patient theSourceResource,
            Patient theTargetResource,
            RequestDetails theRequestDetails,
            MergeOperationOutcome theMergeOutcome,
            RequestPartitionId partitionId) {

        ReplaceReferencesRequest replaceReferencesRequest = new ReplaceReferencesRequest(
                theSourceResource.getIdElement(),
                theTargetResource.getIdElement(),
                theMergeOperationParameters.getResourceLimit(),
                partitionId);

        myReplaceReferencesSvc.replaceReferences(replaceReferencesRequest, theRequestDetails);

        Patient updatedTarget = myMergeResourceHelper.updateMergedResourcesAfterReferencesReplaced(
                myHapiTransactionService,
                theSourceResource,
                theTargetResource,
                (Patient) theMergeOperationParameters.getResultResource(),
                theMergeOperationParameters.getDeleteSource(),
                theRequestDetails);
        theMergeOutcome.setUpdatedTargetResource(updatedTarget);

        String detailsText = "Merge operation completed successfully.";
        addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), null, detailsText);
    }

    private void doMergeAsync(
            BaseMergeOperationInputParameters theMergeOperationParameters,
            Patient theSourceResource,
            Patient theTargetResource,
            RequestDetails theRequestDetails,
            MergeOperationOutcome theMergeOutcome,
            RequestPartitionId thePartitionId) {

        MergeJobParameters mergeJobParameters = theMergeOperationParameters.asMergeJobParameters(
                myFhirContext, myStorageSettings, theSourceResource, theTargetResource, thePartitionId);

        Task task = myBatch2TaskHelper.startJobAndCreateAssociatedTask(
                myTaskDao, theRequestDetails, myJobCoordinator, JOB_MERGE, mergeJobParameters);

        task.setIdElement(task.getIdElement().toUnqualifiedVersionless());
        task.getMeta().setVersionId(null);
        theMergeOutcome.setTask(task);
        theMergeOutcome.setHttpStatusCode(STATUS_HTTP_202_ACCEPTED);

        String detailsText = "Merge request is accepted, and will be processed asynchronously. See"
                + " task resource returned in this response for details.";
        addInfoToOperationOutcome(theMergeOutcome.getOperationOutcome(), null, detailsText);
    }

    private void addInfoToOperationOutcome(
            IBaseOperationOutcome theOutcome, String theDiagnosticMsg, String theDetailsText) {
        IBase issue =
                OperationOutcomeUtil.addIssue(myFhirContext, theOutcome, "information", theDiagnosticMsg, null, null);
        OperationOutcomeUtil.addDetailsToIssue(myFhirContext, issue, null, null, theDetailsText);
    }
}
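ResourceMergeService switches to the batch2 job when the request prefers asynchronous handling (theRequestDetails.isPreferAsync()). From a client, that preference is normally expressed with the HTTP Prefer header. The sketch below is an assumption-laden illustration, not part of this changeset; the "respond-async" token is the conventional value and is not spelled out in this diff, and client and inParams are the placeholders from the earlier sketch.

    Parameters asyncOut = client.operation()
            .onType(Patient.class)
            .named("$merge")
            .withParameters(inParams)
            .withAdditionalHeader("Prefer", "respond-async")
            .execute();
    // On a 202 Accepted response the output includes a Task resource that can be polled
    // until the merge job completes.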
[File diff suppressed because it is too large]
@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -3,7 +3,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -3,7 +3,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -55,12 +55,12 @@ public abstract class BaseSubscriptionSettings {
     * If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
     * Reducing delay between creation of the Subscription and Activation.
     *
     * @since 7.8.0
     * @since 8.0.0
     */
    private boolean mySubscriptionChangeQueuedImmediately = false;

    /**
     * @since 7.8.0
     * @since 8.0.0
     *
     * Regex To perform validation on the endpoint URL for Subscription of type RESTHOOK.
     */

@@ -289,7 +289,7 @@ public abstract class BaseSubscriptionSettings {
     * If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
     * Reducing delay between creation of the Subscription and Activation.
     *
     * @since 7.8.0
     * @since 8.0.0
     */
    public boolean isSubscriptionChangeQueuedImmediately() {
        return mySubscriptionChangeQueuedImmediately;

@@ -299,7 +299,7 @@ public abstract class BaseSubscriptionSettings {
     * If this is enabled (default is {@literal false}), changes to Subscription resource would be put on queue immediately.
     * Reducing delay between creation of the Subscription and Activation.
     *
     * @since 7.8.0
     * @since 8.0.0
     */
    public void setSubscriptionChangeQueuedImmediately(boolean theSubscriptionChangeQueuedImmediately) {
        mySubscriptionChangeQueuedImmediately = theSubscriptionChangeQueuedImmediately;
@@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@@ -6,7 +6,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@@ -43,7 +43,7 @@ class Batch2DaoSvcImplTest extends BaseJpaR4Test {

    @BeforeEach
    void beforeEach() {
        mySvc = new Batch2DaoSvcImpl(myResourceTableDao, myMatchUrlService, myDaoRegistry, myFhirContext, myIHapiTransactionService);
        mySvc = new Batch2DaoSvcImpl(myResourceTableDao, myResourceLinkDao, myMatchUrlService, myDaoRegistry, myFhirContext, myIHapiTransactionService);
    }

    @Test
@@ -0,0 +1,114 @@
package ca.uhn.fhir.jpa.provider.merge;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
import ca.uhn.fhir.batch2.jobs.merge.MergeJobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

import static ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx.JOB_MERGE;
import static org.awaitility.Awaitility.await;

public class MergeBatchTest extends BaseJpaR4Test {

    @Autowired
    private IJobCoordinator myJobCoordinator;
    @Autowired
    private DaoRegistry myDaoRegistry;
    @Autowired
    private Batch2JobHelper myBatch2JobHelper;

    SystemRequestDetails mySrd = new SystemRequestDetails();

    private ReplaceReferencesTestHelper myTestHelper;

    @Override
    @BeforeEach
    public void before() throws Exception {
        super.before();

        myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
        myTestHelper.beforeEach();

        mySrd.setRequestPartitionId(RequestPartitionId.allPartitions());
    }

    @ParameterizedTest
    @CsvSource({
        "true,true",
        "false,true",
        "true,false",
        "false,false"
    })
    public void testHappyPath(boolean theDeleteSource, boolean theWithResultResource) {
        IIdType taskId = createTask();

        MergeJobParameters jobParams = new MergeJobParameters();
        jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
        jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
        jobParams.setTaskId(taskId);
        jobParams.setDeleteSource(theDeleteSource);
        if (theWithResultResource) {
            String encodedResultPatient = myFhirContext.newJsonParser().encodeResourceToString(myTestHelper.createResultPatient(theDeleteSource));
            jobParams.setResultResource(encodedResultPatient);
        }

        JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_MERGE, jobParams);
        Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
        JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(jobStartResponse);

        Bundle patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, taskId);
        ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES,
            List.of(
                "Observation", "Encounter", "CarePlan"));

        myTestHelper.assertAllReferencesUpdated();
        myTestHelper.assertSourcePatientUpdatedOrDeleted(theDeleteSource);
        myTestHelper.assertTargetPatientUpdated(theDeleteSource,
            myTestHelper.getExpectedIdentifiersForTargetAfterMerge(theWithResultResource));
    }

    @Test
    void testMergeJob_JobFails_ErrorHandlerSetsAssociatedTaskStatusToFailed() {
        IIdType taskId = createTask();

        MergeJobParameters jobParams = new MergeJobParameters();
        //use a source that does not exist to force the job to fail
        jobParams.setSourceId(new FhirIdJson("Patient", "doesnotexist"));
        jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
        jobParams.setTaskId(taskId);

        JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_MERGE, jobParams);
        Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
        myBatch2JobHelper.awaitJobFailure(jobStartResponse);

        await().until(() -> {
            myBatch2JobHelper.runMaintenancePass();
            return myTaskDao.read(taskId, mySrd).getStatus().equals(Task.TaskStatus.FAILED);
        });
    }

    private IIdType createTask() {
        Task task = new Task();
        task.setStatus(Task.TaskStatus.INPROGRESS);
        return myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
    }
}
@@ -0,0 +1,390 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInput;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import jakarta.annotation.Nonnull;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER;
import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER_RESPOND_ASYNC;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_INPUT;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_OUTCOME;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_RESULT;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_OUTPUT_PARAM_TASK;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_MERGE_PARAM_RESULT_PATIENT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class PatientMergeR4Test extends BaseResourceProviderR4Test {
	static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(PatientMergeR4Test.class);

	@RegisterExtension
	MyExceptionHandler ourExceptionHandler = new MyExceptionHandler();

	@Autowired
	Batch2JobHelper myBatch2JobHelper;

	ReplaceReferencesTestHelper myTestHelper;

	@Override
	@AfterEach
	public void after() throws Exception {
		super.after();

		myStorageSettings.setDefaultTransactionEntriesForWrite(new JpaStorageSettings().getDefaultTransactionEntriesForWrite());
		myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis());
	}

	@Override
	@BeforeEach
	public void before() throws Exception {
		super.before();
		myStorageSettings.setReuseCachedSearchResultsForMillis(null);
		myStorageSettings.setAllowMultipleDelete(true);
		myFhirContext.setParserErrorHandler(new StrictErrorHandler());

		myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
		myTestHelper.beforeEach();
	}

	@ParameterizedTest
	@CsvSource({
		// withDelete, withInputResultPatient, withPreview, isAsync
		"true, true, true, false",
		"true, false, true, false",
		"false, true, true, false",
		"false, false, true, false",
		"true, true, false, false",
		"true, false, false, false",
		"false, true, false, false",
		"false, false, false, false",

		"true, true, true, true",
		"true, false, true, true",
		"false, true, true, true",
		"false, false, true, true",
		"true, true, false, true",
		"true, false, false, true",
		"false, true, false, true",
		"false, false, false, true",
	})
	public void testMerge(boolean withDelete, boolean withInputResultPatient, boolean withPreview, boolean isAsync) {
		// setup

		ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
		myTestHelper.setSourceAndTarget(inParams);
		inParams.deleteSource = withDelete;
		if (withInputResultPatient) {
			inParams.resultPatient = myTestHelper.createResultPatient(withDelete);
		}
		if (withPreview) {
			inParams.preview = true;
		}

		Parameters inParameters = inParams.asParametersResource();

		// exec
		Parameters outParams = callMergeOperation(inParameters, isAsync);

		// validate
		// in async mode, there will be an additional task resource in the output params
		assertThat(outParams.getParameter()).hasSizeBetween(3, 4);

		// Assert input
		Parameters input = (Parameters) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_INPUT).getResource();
		if (withInputResultPatient) { // if the following assert fails, check that these two patients are identical
			Patient p1 = (Patient) inParameters.getParameter(OPERATION_MERGE_PARAM_RESULT_PATIENT).getResource();
			Patient p2 = (Patient) input.getParameter(OPERATION_MERGE_PARAM_RESULT_PATIENT).getResource();
			ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(p1));
			ourLog.info(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(p2));
		}
		assertTrue(input.equalsDeep(inParameters));

		List<Identifier> expectedIdentifiersOnTargetAfterMerge =
			myTestHelper.getExpectedIdentifiersForTargetAfterMerge(withInputResultPatient);

		// Assert Task inAsync mode, unless it is preview in which case we don't return a task
		if (isAsync && !withPreview) {
			assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);

			Task task = (Task) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_TASK).getResource();
			assertNull(task.getIdElement().getVersionIdPart());
			ourLog.info("Got task {}", task.getId());
			String jobId = myTestHelper.getJobIdFromTask(task);
			myBatch2JobHelper.awaitJobCompletion(jobId);

			Task taskWithOutput = myTaskDao.read(task.getIdElement(), mySrd);
			assertThat(taskWithOutput.getStatus()).isEqualTo(Task.TaskStatus.COMPLETED);
			ourLog.info("Complete Task: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));

			Task.TaskOutputComponent taskOutput = taskWithOutput.getOutputFirstRep();

			// Assert on the output type
			Coding taskType = taskOutput.getType().getCodingFirstRep();
			assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
			assertEquals("Bundle", taskType.getCode());

			List<Resource> containedResources = taskWithOutput.getContained();
			assertThat(containedResources)
				.hasSize(1)
				.element(0)
				.isInstanceOf(Bundle.class);

			Bundle containedBundle = (Bundle) containedResources.get(0);

			Reference outputRef = (Reference) taskOutput.getValue();
			Bundle patchResultBundle = (Bundle) outputRef.getResource();
			assertTrue(containedBundle.equalsDeep(patchResultBundle));
			ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle,
				ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES,
				List.of("Observation", "Encounter", "CarePlan"));

			OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();
			assertThat(outcome.getIssue())
				.hasSize(1)
				.element(0)
				.satisfies(issue -> {
					assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
					assertThat(issue.getDetails().getText()).isEqualTo("Merge request is accepted, and will be " +
						"processed asynchronously. See task resource returned in this response for details.");
				});

		} else { // Synchronous case
			// Assert outcome
			OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();

			if (withPreview) {
				assertThat(outcome.getIssue())
					.hasSize(1)
					.element(0)
					.satisfies(issue -> {
						assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
						assertThat(issue.getDetails().getText()).isEqualTo("Preview only merge operation - no issues detected");
						assertThat(issue.getDiagnostics()).isEqualTo("Merge would update 25 resources");
					});
			} else {
				assertThat(outcome.getIssue())
					.hasSize(1)
					.element(0)
					.satisfies(issue -> {
						assertThat(issue.getSeverity()).isEqualTo(OperationOutcome.IssueSeverity.INFORMATION);
						assertThat(issue.getDetails().getText()).isEqualTo("Merge operation completed successfully.");
					});
			}

			// Assert Merged Patient
			Patient mergedPatient = (Patient) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_RESULT).getResource();
			List<Identifier> identifiers = mergedPatient.getIdentifier();

			// TODO ED We can also validate that result patient returned here has the same id as the target patient.
			// And maybe in not preview case, we should also read the target patient from the db and assert it equals to the result returned.
			myTestHelper.assertIdentifiers(identifiers, expectedIdentifiersOnTargetAfterMerge);
		}

		// Check that the linked resources were updated
		if (withPreview) {
			myTestHelper.assertNothingChanged();
		} else {
			myTestHelper.assertAllReferencesUpdated(withDelete);
			myTestHelper.assertSourcePatientUpdatedOrDeleted(withDelete);
			myTestHelper.assertTargetPatientUpdated(withDelete, expectedIdentifiersOnTargetAfterMerge);
		}
	}

	@Test
	void testMerge_smallResourceLimit() {
		ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
		myTestHelper.setSourceAndTarget(inParams);

		inParams.resourceLimit = 5;
		Parameters inParameters = inParams.asParametersResource();

		// exec
		assertThatThrownBy(() -> callMergeOperation(inParameters, false))
			.isInstanceOf(PreconditionFailedException.class)
			.satisfies(ex -> assertThat(extractFailureMessage((BaseServerResponseException) ex)).isEqualTo("HAPI-2597: Number of resources with references to "+ myTestHelper.getSourcePatientId() + " exceeds the resource-limit 5. Submit the request asynchronsly by adding the HTTP Header 'Prefer: respond-async'."));
	}

	@Test
	void testMerge_SourceResourceCannotBeDeletedBecauseAnotherResourceReferencingSourceWasAddedWhileJobIsRunning_JobFails() {
		ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = new ReplaceReferencesTestHelper.PatientMergeInputParameters();
		myTestHelper.setSourceAndTarget(inParams);
		inParams.deleteSource = true;
		//using a small batch size that would result in multiple chunks to ensure that
		//the job runs a bit slowly so that we have sometime to add a resource that references the source
		//after the first step
		myStorageSettings.setDefaultTransactionEntriesForWrite(5);
		Parameters inParameters = inParams.asParametersResource();

		// exec
		Parameters outParams = callMergeOperation(inParameters, true);
		Task task = (Task) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_TASK).getResource();
		assertNull(task.getIdElement().getVersionIdPart());
		ourLog.info("Got task {}", task.getId());
		String jobId = myTestHelper.getJobIdFromTask(task);

		// wait for first step of the job to finish
		await()
			.until(() -> {
				myBatch2JobHelper.runMaintenancePass();
				String currentGatedStepId = myJobCoordinator.getInstance(jobId).getCurrentGatedStepId();
				return !"query-ids".equals(currentGatedStepId);
			});

		Encounter enc = new Encounter();
		enc.setStatus(Encounter.EncounterStatus.ARRIVED);
		enc.getSubject().setReferenceElement(myTestHelper.getSourcePatientId());
		myEncounterDao.create(enc, mySrd);

		myBatch2JobHelper.awaitJobFailure(jobId);

		Task taskAfterJobFailure = myTaskDao.read(task.getIdElement().toVersionless(), mySrd);
		assertThat(taskAfterJobFailure.getStatus()).isEqualTo(Task.TaskStatus.FAILED);
	}

	@ParameterizedTest
	@CsvSource({
		// withDelete, withInputResultPatient, withPreview
		"true, true, true",
		"true, false, true",
		"false, true, true",
		"false, false, true",
		"true, true, false",
		"true, false, false",
		"false, true, false",
		"false, false, false",
	})
	public void testMultipleTargetMatchesFails(boolean withDelete, boolean withInputResultPatient, boolean withPreview) {
		ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = myTestHelper.buildMultipleTargetMatchParameters(withDelete, withInputResultPatient, withPreview);

		Parameters inParameters = inParams.asParametersResource();

		assertUnprocessibleEntityWithMessage(inParameters, "Multiple resources found matching the identifier(s) specified in 'target-patient-identifier'");
	}

	@ParameterizedTest
	@CsvSource({
		// withDelete, withInputResultPatient, withPreview
		"true, true, true",
		"true, false, true",
		"false, true, true",
		"false, false, true",
		"true, true, false",
		"true, false, false",
		"false, true, false",
		"false, false, false",
	})
	public void testMultipleSourceMatchesFails(boolean withDelete, boolean withInputResultPatient, boolean withPreview) {
		ReplaceReferencesTestHelper.PatientMergeInputParameters inParams = myTestHelper.buildMultipleSourceMatchParameters(withDelete, withInputResultPatient, withPreview);

		Parameters inParameters = inParams.asParametersResource();

		assertUnprocessibleEntityWithMessage(inParameters, "Multiple resources found matching the identifier(s) specified in 'source-patient-identifier'");
	}

	@Test
	void test_MissingRequiredParameters_Returns400BadRequest() {
		assertThatThrownBy(() -> callMergeOperation(new Parameters())
		).isInstanceOf(InvalidRequestException.class)
			.extracting(InvalidRequestException.class::cast)
			.extracting(BaseServerResponseException::getStatusCode)
			.isEqualTo(400);
	}

	private void assertUnprocessibleEntityWithMessage(Parameters inParameters, String theExpectedMessage) {
		assertThatThrownBy(() ->
			callMergeOperation(inParameters))
			.isInstanceOf(UnprocessableEntityException.class)
			.extracting(UnprocessableEntityException.class::cast)
			.extracting(this::extractFailureMessage)
			.isEqualTo(theExpectedMessage);
	}

	private void callMergeOperation(Parameters inParameters) {
		this.callMergeOperation(inParameters, false);
	}

	private Parameters callMergeOperation(Parameters inParameters, boolean isAsync) {
		IOperationUntypedWithInput<Parameters> request = myClient.operation()
			.onType("Patient")
			.named(OPERATION_MERGE)
			.withParameters(inParameters);

		if (isAsync) {
			request.withAdditionalHeader(HEADER_PREFER, HEADER_PREFER_RESPOND_ASYNC);
		}

		return request
			.returnResourceType(Parameters.class)
			.execute();
	}

	class MyExceptionHandler implements TestExecutionExceptionHandler {
		@Override
		public void handleTestExecutionException(ExtensionContext theExtensionContext, Throwable theThrowable) throws Throwable {
			if (theThrowable instanceof BaseServerResponseException) {
				BaseServerResponseException ex = (BaseServerResponseException) theThrowable;
				String message = extractFailureMessage(ex);
				throw ex.getClass().getDeclaredConstructor(String.class, Throwable.class).newInstance(message, ex);
			}
			throw theThrowable;
		}
	}

	private @Nonnull String extractFailureMessage(BaseServerResponseException ex) {
		String body = ex.getResponseBody();
		if (body != null) {
			Parameters outParams = myFhirContext.newJsonParser().parseResource(Parameters.class, body);
			OperationOutcome outcome = (OperationOutcome) outParams.getParameter(OPERATION_MERGE_OUTPUT_PARAM_OUTCOME).getResource();
			return outcome.getIssue().stream()
				.map(OperationOutcome.OperationOutcomeIssueComponent::getDiagnostics)
				.collect(Collectors.joining(", "));
		} else {
			return "null";
		}
	}

	@Override
	protected boolean verboseClientLogging() {
		return true;
	}
}

@@ -0,0 +1,160 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import java.util.List;

import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
import static ca.uhn.fhir.jpa.replacereferences.ReplaceReferencesTestHelper.EXPECTED_SMALL_BATCHES;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class ReplaceReferencesR4Test extends BaseResourceProviderR4Test {
	ReplaceReferencesTestHelper myTestHelper;

	@Override
	@AfterEach
	public void after() throws Exception {
		super.after();
		myStorageSettings.setDefaultTransactionEntriesForWrite(new JpaStorageSettings().getDefaultTransactionEntriesForWrite());
	}

	@Override
	@BeforeEach
	public void before() throws Exception {
		super.before();

		myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
		myTestHelper.beforeEach();
	}

	@ParameterizedTest
	@ValueSource(booleans = {false, true})
	void testReplaceReferences(boolean isAsync) {
		// exec
		Parameters outParams = myTestHelper.callReplaceReferences(myClient, isAsync);

		assertThat(outParams.getParameter()).hasSize(1);

		Bundle patchResultBundle;
		if (isAsync) {
			assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);

			Task task = (Task) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK).getResource();
			assertNull(task.getIdElement().getVersionIdPart());
			ourLog.info("Got task {}", task.getId());

			JobInstance jobInstance = awaitJobCompletion(task);

			patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, task.getIdElement());
		} else {
			patchResultBundle = (Bundle) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME).getResource();
		}

		// validate
		ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle,
			ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES, List.of(
				"Observation", "Encounter", "CarePlan"));

		// Check that the linked resources were updated

		myTestHelper.assertAllReferencesUpdated();
	}

	private JobInstance awaitJobCompletion(Task task) {
		String jobId = myTestHelper.getJobIdFromTask(task);
		return myBatch2JobHelper.awaitJobCompletion(jobId);
	}

	@Test
	void testReplaceReferencesSmallResourceLimitSync() {
		assertThatThrownBy(() -> myTestHelper.callReplaceReferencesWithResourceLimit(myClient, false, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE))
			.isInstanceOf(PreconditionFailedException.class)
			.hasMessage("HTTP 412 Precondition Failed: HAPI-2597: Number of resources with references to " + myTestHelper.getSourcePatientId() + " exceeds the resource-limit 5. Submit the request asynchronsly by adding the HTTP Header 'Prefer: respond-async'.");
	}

	@Test
	void testReplaceReferencesSmallTransactionEntriesSize() {
		myStorageSettings.setDefaultTransactionEntriesForWrite(5);

		// exec
		Parameters outParams = myTestHelper.callReplaceReferencesWithResourceLimit(myClient, true, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE);

		assertThat(getLastHttpStatusCode()).isEqualTo(HttpServletResponse.SC_ACCEPTED);

		assertThat(outParams.getParameter()).hasSize(1);

		Bundle patchResultBundle;
		Task task = (Task) outParams.getParameter(OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK).getResource();
		assertNull(task.getIdElement().getVersionIdPart());
		ourLog.info("Got task {}", task.getId());

		awaitJobCompletion(task);

		Task taskWithOutput = myTaskDao.read(task.getIdElement(), mySrd);
		ourLog.info("Complete Task: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));

		assertThat(taskWithOutput.getOutput()).as("task " + task.getId() + " has size " + EXPECTED_SMALL_BATCHES).hasSize(EXPECTED_SMALL_BATCHES);
		List<Resource> containedResources = taskWithOutput.getContained();

		assertThat(containedResources)
			.hasSize(EXPECTED_SMALL_BATCHES)
			.element(0)
			.isInstanceOf(Bundle.class);

		int entriesLeft = ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES;
		for (int i = 1; i < EXPECTED_SMALL_BATCHES; i++) {

			Task.TaskOutputComponent taskOutput = taskWithOutput.getOutput().get(i);

			// Assert on the output type
			Coding taskType = taskOutput.getType().getCodingFirstRep();
			assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
			assertEquals("Bundle", taskType.getCode());

			Bundle containedBundle = (Bundle) containedResources.get(i);

			Reference outputRef = (Reference) taskOutput.getValue();
			patchResultBundle = (Bundle) outputRef.getResource();
			assertTrue(containedBundle.equalsDeep(patchResultBundle));

			// validate
			entriesLeft -= ReplaceReferencesTestHelper.SMALL_BATCH_SIZE;
			int expectedNumberOfEntries = Math.min(entriesLeft, ReplaceReferencesTestHelper.SMALL_BATCH_SIZE);
			ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, expectedNumberOfEntries, List.of("Observation",
				"Encounter", "CarePlan"));
		}

		// Check that the linked resources were updated

		myTestHelper.assertAllReferencesUpdated();
	}

	// TODO ED we should add some tests for the invalid request error cases (and assert 4xx status code)

	@Override
	protected boolean verboseClientLogging() {
		return true;
	}
}

@@ -0,0 +1,96 @@
package ca.uhn.fhir.jpa.replacereferences;

import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.test.Batch2JobHelper;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Task;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

import static ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx.JOB_REPLACE_REFERENCES;
import static org.awaitility.Awaitility.await;

public class ReplaceReferencesBatchTest extends BaseJpaR4Test {

	@Autowired
	private IJobCoordinator myJobCoordinator;
	@Autowired
	private DaoRegistry myDaoRegistry;
	@Autowired
	private Batch2JobHelper myBatch2JobHelper;

	SystemRequestDetails mySrd = new SystemRequestDetails();

	private ReplaceReferencesTestHelper myTestHelper;

	@Override
	@BeforeEach
	public void before() throws Exception {
		super.before();

		myTestHelper = new ReplaceReferencesTestHelper(myFhirContext, myDaoRegistry);
		myTestHelper.beforeEach();

		mySrd.setRequestPartitionId(RequestPartitionId.allPartitions());
	}

	@Test
	public void testHappyPath() {
		IIdType taskId = createReplaceReferencesTask();

		ReplaceReferencesJobParameters jobParams = new ReplaceReferencesJobParameters();
		jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
		jobParams.setTargetId(new FhirIdJson(myTestHelper.getTargetPatientId()));
		jobParams.setTaskId(taskId);

		JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_REPLACE_REFERENCES, jobParams);
		Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
		JobInstance jobInstance = myBatch2JobHelper.awaitJobCompletion(jobStartResponse);

		Bundle patchResultBundle = myTestHelper.validateCompletedTask(jobInstance, taskId);
		ReplaceReferencesTestHelper.validatePatchResultBundle(patchResultBundle, ReplaceReferencesTestHelper.TOTAL_EXPECTED_PATCHES, List.of(
			"Observation", "Encounter", "CarePlan"));

		myTestHelper.assertAllReferencesUpdated();
	}

	@Test
	void testReplaceReferencesJob_JobFails_ErrorHandlerSetsAssociatedTaskStatusToFailed() {
		IIdType taskId = createReplaceReferencesTask();

		ReplaceReferencesJobParameters jobParams = new ReplaceReferencesJobParameters();
		jobParams.setSourceId(new FhirIdJson(myTestHelper.getSourcePatientId()));
		//use a target that does not exist to force the job to fail
		jobParams.setTargetId(new FhirIdJson("Patient", "doesnotexist"));
		jobParams.setTaskId(taskId);

		JobInstanceStartRequest request = new JobInstanceStartRequest(JOB_REPLACE_REFERENCES, jobParams);
		Batch2JobStartResponse jobStartResponse = myJobCoordinator.startInstance(mySrd, request);
		myBatch2JobHelper.awaitJobFailure(jobStartResponse);

		await().until(() -> {
			myBatch2JobHelper.runMaintenancePass();
			return myTaskDao.read(taskId, mySrd).getStatus().equals(Task.TaskStatus.FAILED);
		});
	}

	private IIdType createReplaceReferencesTask() {
		Task task = new Task();
		task.setStatus(Task.TaskStatus.INPROGRESS);
		return myTaskDao.create(task, mySrd).getId().toUnqualifiedVersionless();
	}
}

@@ -16,8 +16,14 @@
	<logger name="org.eclipse.jetty.websocket" level="info"/>
	<logger name="org.hibernate.event.internal.DefaultPersistEventListener" level="info"/>
	<logger name="org.eclipse" level="error"/>
	<logger name="ca.uhn.fhir.rest.client" level="info"/>
	<logger name="ca.uhn.fhir.rest.client" level="debug"/>

	<!-- These logs can get pretty noisy -->
	<logger name="ca.uhn.fhir.jpa.dao" level="info"/>
	<logger name="org.thymeleaf" level="info"/>
	<logger name="ca.uhn.fhir.jpa.model.entity" level="info"/>
	<logger name="org.springframework" level="info"/>
	<logger name="org.hibernate" level="info"/>

	<!-- set to debug to enable term expansion logs -->

@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.7.15-SNAPSHOT</version>
		<version>7.7.16-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.7.15-SNAPSHOT</version>
		<version>7.7.16-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -6,7 +6,7 @@
	<parent>
		<groupId>ca.uhn.hapi.fhir</groupId>
		<artifactId>hapi-deployable-pom</artifactId>
		<version>7.7.15-SNAPSHOT</version>
		<version>7.7.16-SNAPSHOT</version>
		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
	</parent>

@@ -25,13 +25,17 @@ import ca.uhn.fhir.batch2.jobs.reindex.ReindexProvider;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.graphql.GraphQLProvider;
import ca.uhn.fhir.jpa.provider.merge.PatientMergeProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.narrative.DefaultThymeleafNarrativeGenerator;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.client.api.IClientInterceptor;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.api.IHttpRequest;
import ca.uhn.fhir.rest.client.api.IHttpResponse;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;

@@ -74,6 +78,8 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
	@RegisterExtension
	protected RestfulServerExtension myServer;

	private MyHttpCodeClientIntercepter myLastHttpResponseCodeCapture = new MyHttpCodeClientIntercepter();

	@RegisterExtension
	protected RestfulServerConfigurerExtension myServerConfigurer = new RestfulServerConfigurerExtension(() -> myServer)
		.withServerBeforeAll(s -> {

@@ -94,6 +100,7 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
			s.registerProvider(myAppCtx.getBean(SubscriptionTriggeringProvider.class));
			s.registerProvider(myAppCtx.getBean(TerminologyUploaderProvider.class));
			s.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class));
			s.registerProvider(myAppCtx.getBean(PatientMergeProvider.class));

			s.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class));

@@ -127,8 +134,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {

			myClient.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof LoggingInterceptor);
			if (shouldLogClient()) {
				myClient.registerInterceptor(new LoggingInterceptor());
				myClient.registerInterceptor(new LoggingInterceptor(verboseClientLogging()));
			}

			myClient.registerInterceptor(myLastHttpResponseCodeCapture);
		});

	@Autowired

@@ -157,6 +166,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
		return true;
	}

	protected boolean verboseClientLogging() {
		return false;
	}

	protected List<String> toNameList(Bundle resp) {
		List<String> names = new ArrayList<>();
		for (BundleEntryComponent next : resp.getEntry()) {

@@ -172,6 +185,10 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
		return names;
	}

	protected int getLastHttpStatusCode() {
		return myLastHttpResponseCodeCapture.getLastHttpStatusCode();
	}

	public static int getNumberOfParametersByName(Parameters theParameters, String theName) {
		int retVal = 0;

@@ -241,4 +258,21 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {

		return ids;
	}

	private class MyHttpCodeClientIntercepter implements IClientInterceptor {

		private int myLastHttpStatusCode;

		@Override
		public void interceptRequest(IHttpRequest theRequest) {}

		@Override
		public void interceptResponse(IHttpResponse theResponse) throws IOException {
			myLastHttpStatusCode = theResponse.getStatus();
		}

		public int getLastHttpStatusCode() {
			return myLastHttpStatusCode;
		}
	}
}

@@ -0,0 +1,483 @@
/*-
 * #%L
 * HAPI FHIR JPA Server Test Utilities
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.replacereferences;

import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
import ca.uhn.fhir.jpa.api.dao.PatientEverythingParameters;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.gclient.IOperationUntypedWithInputAndPartialOutput;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.JsonUtil;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Task;
import org.hl7.fhir.r4.model.Type;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.provider.ReplaceReferencesSvcImpl.RESOURCE_TYPES_SYSTEM;
import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER;
import static ca.uhn.fhir.rest.api.Constants.HEADER_PREFER_RESPOND_ASYNC;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.HAPI_BATCH_JOB_ID_SYSTEM;
import static ca.uhn.fhir.rest.server.provider.ProviderConstants.OPERATION_REPLACE_REFERENCES;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class ReplaceReferencesTestHelper {
	private static final Logger ourLog = LoggerFactory.getLogger(ReplaceReferencesTestHelper.class);

	static final Identifier pat1IdentifierA =
		new Identifier().setSystem("SYS1A").setValue("VAL1A");
	static final Identifier pat1IdentifierB =
		new Identifier().setSystem("SYS1B").setValue("VAL1B");
	static final Identifier pat2IdentifierA =
		new Identifier().setSystem("SYS2A").setValue("VAL2A");
	static final Identifier pat2IdentifierB =
		new Identifier().setSystem("SYS2B").setValue("VAL2B");
	static final Identifier patBothIdentifierC =
		new Identifier().setSystem("SYSC").setValue("VALC");
	public static final int TOTAL_EXPECTED_PATCHES = 23;
	public static final int SMALL_BATCH_SIZE = 5;
	public static final int EXPECTED_SMALL_BATCHES = (TOTAL_EXPECTED_PATCHES + SMALL_BATCH_SIZE - 1) / SMALL_BATCH_SIZE;
	private final IFhirResourceDaoPatient<Patient> myPatientDao;
	private final IFhirResourceDao<Task> myTaskDao;
	private final IFhirResourceDao<Organization> myOrganizationDao;
	private final IFhirResourceDao<Encounter> myEncounterDao;
	private final IFhirResourceDao<CarePlan> myCarePlanDao;
	private final IFhirResourceDao<Observation> myObservationDao;

	private IIdType myOrgId;
	private IIdType mySourcePatientId;
	private IIdType mySourceCarePlanId;
	private IIdType mySourceEncId1;
	private IIdType mySourceEncId2;
	private ArrayList<IIdType> mySourceObsIds;
	private IIdType myTargetPatientId;
	private IIdType myTargetEnc1;

	private final FhirContext myFhirContext;
	private final SystemRequestDetails mySrd = new SystemRequestDetails();

	public ReplaceReferencesTestHelper(FhirContext theFhirContext, DaoRegistry theDaoRegistry) {
		myFhirContext = theFhirContext;
		myPatientDao = (IFhirResourceDaoPatient<Patient>) theDaoRegistry.getResourceDao(Patient.class);
		myTaskDao = theDaoRegistry.getResourceDao(Task.class);
		myOrganizationDao = theDaoRegistry.getResourceDao(Organization.class);
		myEncounterDao = theDaoRegistry.getResourceDao(Encounter.class);
		myCarePlanDao = theDaoRegistry.getResourceDao(CarePlan.class);
		myObservationDao = theDaoRegistry.getResourceDao(Observation.class);
	}

	public void beforeEach() throws Exception {

		Organization org = new Organization();
		org.setName("an org");
		myOrgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();
		ourLog.info("OrgId: {}", myOrgId);

		Patient patient1 = new Patient();
		patient1.getManagingOrganization().setReferenceElement(myOrgId);
		patient1.addIdentifier(pat1IdentifierA);
		patient1.addIdentifier(pat1IdentifierB);
		patient1.addIdentifier(patBothIdentifierC);
		mySourcePatientId = myPatientDao.create(patient1, mySrd).getId().toUnqualifiedVersionless();

		Patient patient2 = new Patient();
		patient2.addIdentifier(pat2IdentifierA);
		patient2.addIdentifier(pat2IdentifierB);
		patient2.addIdentifier(patBothIdentifierC);
		patient2.getManagingOrganization().setReferenceElement(myOrgId);
		myTargetPatientId = myPatientDao.create(patient2, mySrd).getId().toUnqualifiedVersionless();

		Encounter enc1 = new Encounter();
		enc1.setStatus(Encounter.EncounterStatus.CANCELLED);
		enc1.getSubject().setReferenceElement(mySourcePatientId);
		enc1.getServiceProvider().setReferenceElement(myOrgId);
		mySourceEncId1 = myEncounterDao.create(enc1, mySrd).getId().toUnqualifiedVersionless();

		Encounter enc2 = new Encounter();
		enc2.setStatus(Encounter.EncounterStatus.ARRIVED);
		enc2.getSubject().setReferenceElement(mySourcePatientId);
		enc2.getServiceProvider().setReferenceElement(myOrgId);
		mySourceEncId2 = myEncounterDao.create(enc2, mySrd).getId().toUnqualifiedVersionless();

		CarePlan carePlan = new CarePlan();
		carePlan.setStatus(CarePlan.CarePlanStatus.ACTIVE);
		carePlan.getSubject().setReferenceElement(mySourcePatientId);
		mySourceCarePlanId = myCarePlanDao.create(carePlan, mySrd).getId().toUnqualifiedVersionless();

		Encounter targetEnc1 = new Encounter();
		targetEnc1.setStatus(Encounter.EncounterStatus.ARRIVED);
		targetEnc1.getSubject().setReferenceElement(myTargetPatientId);
		targetEnc1.getServiceProvider().setReferenceElement(myOrgId);
		this.myTargetEnc1 = myEncounterDao.create(targetEnc1, mySrd).getId().toUnqualifiedVersionless();

		mySourceObsIds = new ArrayList<>();
		for (int i = 0; i < 20; i++) {
			Observation obs = new Observation();
			obs.getSubject().setReferenceElement(mySourcePatientId);
			obs.setStatus(Observation.ObservationStatus.FINAL);
			IIdType obsId = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();
			mySourceObsIds.add(obsId);
		}
	}

	public void setSourceAndTarget(PatientMergeInputParameters inParams) {
		inParams.sourcePatient = new Reference().setReferenceElement(mySourcePatientId);
		inParams.targetPatient = new Reference().setReferenceElement(myTargetPatientId);
	}

	public Patient createResultPatient(boolean theDeleteSource) {
		Patient resultPatient = new Patient();
		resultPatient.setIdElement((IdType) myTargetPatientId);
		resultPatient.addIdentifier(pat1IdentifierA);
		if (!theDeleteSource) {
			// add the link only if we are not deleting the source
			Patient.PatientLinkComponent link = resultPatient.addLink();
			link.setOther(new Reference(mySourcePatientId));
			link.setType(Patient.LinkType.REPLACES);
		}
		return resultPatient;
	}

	public Patient readSourcePatient() {
		return myPatientDao.read(mySourcePatientId, mySrd);
	}

	public Patient readTargetPatient() {
		return myPatientDao.read(myTargetPatientId, mySrd);
	}

	public IIdType getTargetPatientId() {
		return myTargetPatientId;
	}

	private Set<IIdType> getTargetEverythingResourceIds() {
		PatientEverythingParameters everythingParams = new PatientEverythingParameters();
		everythingParams.setCount(new IntegerType(100));

		IBundleProvider bundleProvider =
			myPatientDao.patientInstanceEverything(null, mySrd, everythingParams, myTargetPatientId);

		assertNull(bundleProvider.getNextPageId());

		return bundleProvider.getAllResources().stream()
			.map(IBaseResource::getIdElement)
			.map(IIdType::toUnqualifiedVersionless)
			.collect(Collectors.toSet());
	}

	public String getJobIdFromTask(Task task) {
		assertThat(task.getIdentifier())
			.hasSize(1)
			.element(0)
			.extracting(Identifier::getSystem)
			.isEqualTo(HAPI_BATCH_JOB_ID_SYSTEM);

		return task.getIdentifierFirstRep().getValue();
	}

	public Parameters callReplaceReferences(IGenericClient theFhirClient, boolean theIsAsync) {
		return callReplaceReferencesWithResourceLimit(theFhirClient, theIsAsync, null);
	}

	public Parameters callReplaceReferencesWithResourceLimit(
			IGenericClient theFhirClient, boolean theIsAsync, Integer theResourceLimit) {
		IOperationUntypedWithInputAndPartialOutput<Parameters> request = theFhirClient
			.operation()
			.onServer()
			.named(OPERATION_REPLACE_REFERENCES)
			.withParameter(
				Parameters.class,
				ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID,
				new StringType(mySourcePatientId.getValue()))
			.andParameter(
				ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID,
				new StringType(myTargetPatientId.getValue()));
		if (theResourceLimit != null) {
			request.andParameter(
				ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT, new IntegerType(theResourceLimit));
		}

		if (theIsAsync) {
			request.withAdditionalHeader(HEADER_PREFER, HEADER_PREFER_RESPOND_ASYNC);
		}

		return request.returnResourceType(Parameters.class).execute();
	}

	public void assertAllReferencesUpdated() {
		assertAllReferencesUpdated(false);
	}

	public void assertAllReferencesUpdated(boolean theWithDelete) {

		Set<IIdType> actual = getTargetEverythingResourceIds();

		ourLog.info("Found IDs: {}", actual);

		if (theWithDelete) {
			assertThat(actual).doesNotContain(mySourcePatientId);
		}
		assertThat(actual).contains(mySourceEncId1);
		assertThat(actual).contains(mySourceEncId2);
		assertThat(actual).contains(myOrgId);
		assertThat(actual).contains(mySourceCarePlanId);
		assertThat(actual).containsAll(mySourceObsIds);
		assertThat(actual).contains(myTargetPatientId);
		assertThat(actual).contains(myTargetEnc1);
	}

	public void assertNothingChanged() {
		Set<IIdType> actual = getTargetEverythingResourceIds();

		ourLog.info("Found IDs: {}", actual);

		assertThat(actual).doesNotContain(mySourcePatientId);
		assertThat(actual).doesNotContain(mySourceEncId1);
		assertThat(actual).doesNotContain(mySourceEncId2);
		assertThat(actual).contains(myOrgId);
		assertThat(actual).doesNotContain(mySourceCarePlanId);
		assertThat(actual).doesNotContainAnyElementsOf(mySourceObsIds);
		assertThat(actual).contains(myTargetPatientId);
		assertThat(actual).contains(myTargetEnc1);

		// TODO ED should we also assert here that source still has the all references it had before the operation,
		// that is in addition to the validation that target doesn't contain the references.
	}

	public PatientMergeInputParameters buildMultipleTargetMatchParameters(
			boolean theWithDelete, boolean theWithInputResultPatient, boolean theWithPreview) {
		PatientMergeInputParameters inParams = new PatientMergeInputParameters();
		inParams.sourcePatient = new Reference().setReferenceElement(mySourcePatientId);
		inParams.targetPatientIdentifier = patBothIdentifierC;
		inParams.deleteSource = theWithDelete;
		if (theWithInputResultPatient) {
			inParams.resultPatient = createResultPatient(theWithDelete);
		}
		if (theWithPreview) {
			inParams.preview = true;
		}
		return inParams;
	}

	public PatientMergeInputParameters buildMultipleSourceMatchParameters(
			boolean theWithDelete, boolean theWithInputResultPatient, boolean theWithPreview) {
		PatientMergeInputParameters inParams = new PatientMergeInputParameters();
		inParams.sourcePatientIdentifier = patBothIdentifierC;
		inParams.targetPatient = new Reference().setReferenceElement(mySourcePatientId);
		inParams.deleteSource = theWithDelete;
		if (theWithInputResultPatient) {
			inParams.resultPatient = createResultPatient(theWithDelete);
		}
		if (theWithPreview) {
			inParams.preview = true;
		}
		return inParams;
	}

	public IIdType getSourcePatientId() {
		return mySourcePatientId;
	}

	public static class PatientMergeInputParameters {
		public Type sourcePatient;
		public Type sourcePatientIdentifier;
		public Type targetPatient;
		public Type targetPatientIdentifier;
		public Patient resultPatient;
		public Boolean preview;
		public Boolean deleteSource;
		public Integer resourceLimit;

		public Parameters asParametersResource() {
			Parameters inParams = new Parameters();
			if (sourcePatient != null) {
				inParams.addParameter().setName("source-patient").setValue(sourcePatient);
			}
			if (sourcePatientIdentifier != null) {
				inParams.addParameter().setName("source-patient-identifier").setValue(sourcePatientIdentifier);
			}
			if (targetPatient != null) {
				inParams.addParameter().setName("target-patient").setValue(targetPatient);
			}
			if (targetPatientIdentifier != null) {
				inParams.addParameter().setName("target-patient-identifier").setValue(targetPatientIdentifier);
			}
			if (resultPatient != null) {
				inParams.addParameter().setName("result-patient").setResource(resultPatient);
			}
			if (preview != null) {
				inParams.addParameter().setName("preview").setValue(new BooleanType(preview));
			}
			if (deleteSource != null) {
				inParams.addParameter().setName("delete-source").setValue(new BooleanType(deleteSource));
			}
			if (resourceLimit != null) {
				inParams.addParameter().setName("batch-size").setValue(new IntegerType(resourceLimit));
			}
			return inParams;
		}
	}

	public static void validatePatchResultBundle(
			Bundle patchResultBundle, int theTotalExpectedPatches, List<String> theExpectedResourceTypes) {
		String resourceMatchString = "(" + String.join("|", theExpectedResourceTypes) + ")";
		Pattern expectedPatchIssuePattern =
			Pattern.compile("Successfully patched resource \"" + resourceMatchString + "/\\d+/_history/\\d+\".");
		assertThat(patchResultBundle.getEntry())
			.hasSize(theTotalExpectedPatches)
			.allSatisfy(entry -> assertThat(entry.getResponse().getOutcome())
				.isInstanceOf(OperationOutcome.class)
				.extracting(OperationOutcome.class::cast)
				.extracting(OperationOutcome::getIssue)
				.satisfies(issues -> assertThat(issues)
					.hasSize(1)
					.element(0)
					.extracting(OperationOutcome.OperationOutcomeIssueComponent::getDiagnostics)
					.satisfies(
						diagnostics -> assertThat(diagnostics).matches(expectedPatchIssuePattern))));
	}

	public Bundle validateCompletedTask(JobInstance theJobInstance, IIdType theTaskId) {
		validateJobReport(theJobInstance, theTaskId);

		Bundle patchResultBundle;
		Task taskWithOutput = myTaskDao.read(theTaskId, mySrd);
		assertThat(taskWithOutput.getStatus()).isEqualTo(Task.TaskStatus.COMPLETED);
		ourLog.info(
			"Complete Task: {}",
			myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(taskWithOutput));

		Task.TaskOutputComponent taskOutput = taskWithOutput.getOutputFirstRep();

		// Assert on the output type
		Coding taskType = taskOutput.getType().getCodingFirstRep();
		assertEquals(RESOURCE_TYPES_SYSTEM, taskType.getSystem());
		assertEquals("Bundle", taskType.getCode());

		List<Resource> containedResources = taskWithOutput.getContained();
		assertThat(containedResources).hasSize(1).element(0).isInstanceOf(Bundle.class);

		Bundle containedBundle = (Bundle) containedResources.get(0);

		Reference outputRef = (Reference) taskOutput.getValue();
		patchResultBundle = (Bundle) outputRef.getResource();
		// ourLog.info("containedBundle: {}",
		// myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(containedBundle));
		// ourLog.info("patchResultBundle: {}",
		// myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(patchResultBundle));
		assertTrue(containedBundle.equalsDeep(patchResultBundle));
		return patchResultBundle;
	}

	private void validateJobReport(JobInstance theJobInstance, IIdType theTaskId) {
		String report = theJobInstance.getReport();
		ReplaceReferenceResultsJson replaceReferenceResultsJson =
			JsonUtil.deserialize(report, ReplaceReferenceResultsJson.class);
		IdDt resultTaskId = replaceReferenceResultsJson.getTaskId().asIdDt();
		assertEquals(theTaskId.getIdPart(), resultTaskId.getIdPart());
	}

	public List<Identifier> getExpectedIdentifiersForTargetAfterMerge(boolean theWithInputResultPatient) {

		List<Identifier> expectedIdentifiersOnTargetAfterMerge = null;
		if (theWithInputResultPatient) {
			expectedIdentifiersOnTargetAfterMerge =
				List.of(new Identifier().setSystem("SYS1A").setValue("VAL1A"));
		} else {
			// the identifiers copied over from source should be marked as old
			expectedIdentifiersOnTargetAfterMerge = List.of(
				new Identifier().setSystem("SYS2A").setValue("VAL2A"),
				new Identifier().setSystem("SYS2B").setValue("VAL2B"),
				new Identifier().setSystem("SYSC").setValue("VALC"),
				new Identifier().setSystem("SYS1A").setValue("VAL1A").copy().setUse(Identifier.IdentifierUse.OLD),
				new Identifier().setSystem("SYS1B").setValue("VAL1B").copy().setUse(Identifier.IdentifierUse.OLD));
		}
		return expectedIdentifiersOnTargetAfterMerge;
	}

	public void assertSourcePatientUpdatedOrDeleted(boolean withDelete) {
		if (withDelete) {
			assertThrows(ResourceGoneException.class, () -> readSourcePatient());
		} else {
			Patient source = readSourcePatient();
			assertThat(source.getLink()).hasSize(1);
			Patient.PatientLinkComponent link = source.getLink().get(0);
			assertThat(link.getOther().getReferenceElement()).isEqualTo(getTargetPatientId());
			assertThat(link.getType()).isEqualTo(Patient.LinkType.REPLACEDBY);
		}
	}

	public void assertTargetPatientUpdated(boolean withDelete, List<Identifier> theExpectedIdentifiers) {
Patient target = readTargetPatient();
|
||||
if (!withDelete) {
|
||||
assertThat(target.getLink()).hasSize(1);
|
||||
Patient.PatientLinkComponent link = target.getLink().get(0);
|
||||
assertThat(link.getOther().getReferenceElement()).isEqualTo(getSourcePatientId());
|
||||
assertThat(link.getType()).isEqualTo(Patient.LinkType.REPLACES);
|
||||
}
|
||||
// assertExpected Identifiers found on the target
|
||||
assertIdentifiers(target.getIdentifier(), theExpectedIdentifiers);
|
||||
}
|
||||
|
||||
public void assertIdentifiers(List<Identifier> theActualIdentifiers, List<Identifier> theExpectedIdentifiers) {
|
||||
assertThat(theActualIdentifiers).hasSize(theExpectedIdentifiers.size());
|
||||
for (int i = 0; i < theExpectedIdentifiers.size(); i++) {
|
||||
Identifier expectedIdentifier = theExpectedIdentifiers.get(i);
|
||||
Identifier actualIdentifier = theActualIdentifiers.get(i);
|
||||
assertThat(expectedIdentifier.equalsDeep(actualIdentifier)).isTrue();
|
||||
}
|
||||
}
|
||||
}
|
|
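The helper above only builds the input Parameters; for reference, a minimal sketch of how a client could submit them to the type-level $merge operation. This is not code from this diff: the IGenericClient instance "client" and the availability of ProviderConstants in the test classpath are assumptions.

        Parameters input = inParams.asParametersResource();
        Parameters output = client.operation()
                .onType(Patient.class)
                .named(ProviderConstants.OPERATION_MERGE)
                .withParameters(input)
                .execute();
        // In preview mode the "result" output parameter is expected to carry the target Patient
        // as it would look after the merge, without any data being changed.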
@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.test;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobMaintenanceService;
import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.batch2.jobs.merge.MergeAppCtx;
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesAppCtx;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.interceptor.api.IInterceptorService;

@ -224,7 +226,9 @@ import static org.junit.jupiter.api.Assertions.fail;

@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
    TestR4Config.class
    TestR4Config.class,
    ReplaceReferencesAppCtx.class, // Batch job
    MergeAppCtx.class // Batch job
})
public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuilder {
    public static final String MY_VALUE_SET = "my-value-set";
@ -80,11 +80,11 @@ public class TestR5Config
     * and catch any potential deadlocks caused by database connection
     * starvation
     *
     * A minimum of 2 is necessary for most transactions,
     * so 2 will be our limit
     * A minimum of 3 is necessary for most transactions,
     * so 3 will be our minimum
     */
    if (ourMaxThreads == null) {
        ourMaxThreads = (int) (Math.random() * 6.0) + 2;
        ourMaxThreads = (int) (Math.random() * 6.0) + 3;

        if (HapiTestSystemProperties.isSingleDbConnectionEnabled()) {
            ourMaxThreads = 1;
@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-fhir</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>

        <relativePath>../pom.xml</relativePath>
    </parent>

@ -7,7 +7,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@ -7,7 +7,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@ -22,7 +22,6 @@ package ca.uhn.fhir.mdm.util;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.mdm.model.CanonicalEID;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.CanonicalIdentifier;
import org.hl7.fhir.instance.model.api.IBase;

@ -31,23 +30,7 @@ public final class IdentifierUtil {
    private IdentifierUtil() {}

    public static CanonicalIdentifier identifierDtFromIdentifier(IBase theIdentifier) {
        CanonicalIdentifier retval = new CanonicalIdentifier();

        // TODO add other fields like "use" etc
        if (theIdentifier instanceof org.hl7.fhir.dstu3.model.Identifier) {
            org.hl7.fhir.dstu3.model.Identifier ident = (org.hl7.fhir.dstu3.model.Identifier) theIdentifier;
            retval.setSystem(ident.getSystem()).setValue(ident.getValue());
        } else if (theIdentifier instanceof org.hl7.fhir.r4.model.Identifier) {
            org.hl7.fhir.r4.model.Identifier ident = (org.hl7.fhir.r4.model.Identifier) theIdentifier;
            retval.setSystem(ident.getSystem()).setValue(ident.getValue());
        } else if (theIdentifier instanceof org.hl7.fhir.r5.model.Identifier) {
            org.hl7.fhir.r5.model.Identifier ident = (org.hl7.fhir.r5.model.Identifier) theIdentifier;
            retval.setSystem(ident.getSystem()).setValue(ident.getValue());
        } else {
            throw new InternalErrorException(Msg.code(1486) + "Expected 'Identifier' type but was '"
                    + theIdentifier.getClass().getName() + "'");
        }
        return retval;
        return ca.uhn.fhir.util.CanonicalIdentifier.fromIdentifier(theIdentifier);
    }

    /**
@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>

        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>

@ -5,7 +5,7 @@
    <parent>
        <groupId>ca.uhn.hapi.fhir</groupId>
        <artifactId>hapi-deployable-pom</artifactId>
        <version>7.7.15-SNAPSHOT</version>
        <version>7.7.16-SNAPSHOT</version>
        <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
    </parent>
@ -23,9 +23,11 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.PreferHeader;
import ca.uhn.fhir.rest.api.RequestTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.server.IRestfulServerDefaults;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.UrlUtil;

@ -609,4 +611,10 @@ public abstract class RequestDetails {
    public void setRetry(boolean theRetry) {
        myRetry = theRetry;
    }

    public boolean isPreferAsync() {
        String prefer = getHeader(Constants.HEADER_PREFER);
        PreferHeader preferHeader = RestfulServerUtils.parsePreferHeader(prefer);
        return preferHeader.getRespondAsync();
    }
}
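A minimal client-side sketch of opting into the async path that isPreferAsync() detects. The "client" variable and the input Parameters are assumptions, not part of this diff.

        Parameters output = client.operation()
                .onType(Patient.class)
                .named(ProviderConstants.OPERATION_MERGE)
                .withParameters(input)
                .withAdditionalHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC)
                .execute();
        // When the server honours the header it is expected to answer 202 Accepted and include
        // a Task in the output that can be polled for completion.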
@ -78,6 +78,13 @@ public class SystemRequestDetails extends RequestDetails {
        }
    }

    // TODO KHS use this everywhere we create a srd with only one partition
    public static SystemRequestDetails forRequestPartitionId(RequestPartitionId thePartitionId) {
        SystemRequestDetails retVal = new SystemRequestDetails();
        retVal.setRequestPartitionId(thePartitionId);
        return retVal;
    }

    public RequestPartitionId getRequestPartitionId() {
        return myRequestPartitionId;
    }
@ -791,6 +791,17 @@ public class RestfulServerUtils {

    @Nonnull
    public static PreferHeader parsePreferHeader(IRestfulServer<?> theServer, String theValue) {
        PreferHeader retVal = parsePreferHeader(theValue);

        if (retVal.getReturn() == null && theServer != null && theServer.getDefaultPreferReturn() != null) {
            retVal.setReturn(theServer.getDefaultPreferReturn());
        }

        return retVal;
    }

    @Nonnull
    public static PreferHeader parsePreferHeader(String theValue) {
        PreferHeader retVal = new PreferHeader();

        if (isNotBlank(theValue)) {

@ -825,11 +836,6 @@
                }
            }
        }

        if (retVal.getReturn() == null && theServer != null && theServer.getDefaultPreferReturn() != null) {
            retVal.setReturn(theServer.getDefaultPreferReturn());
        }

        return retVal;
    }
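For reference, a small sketch of what the new single-argument overload is expected to produce; whether both directives may be combined in one header value is an assumption on my part.

        PreferHeader prefer = RestfulServerUtils.parsePreferHeader("respond-async, return=representation");
        // prefer.getRespondAsync() is expected to be true
        // prefer.getReturn() is expected to be PreferReturnEnum.REPRESENTATION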
@ -277,16 +277,27 @@ public abstract class BaseResourceReturningMethodBinding extends BaseMethodBindi
        When we write directly to an HttpServletResponse, the invocation returns null. However, we still want to invoke
        the SERVER_OUTGOING_RESPONSE pointcut.
        */

        // if the response status code is set by the method, respect it. Otherwise, use the default 200.
        int responseCode = Constants.STATUS_HTTP_200_OK;
        if (theRequest instanceof ServletRequestDetails) {
            HttpServletResponse servletResponse = ((ServletRequestDetails) theRequest).getServletResponse();
            if (servletResponse != null && servletResponse.getStatus() > 0) {
                responseCode = servletResponse.getStatus();
            }
        }

        if (response == null) {
            ResponseDetails responseDetails = new ResponseDetails();
            responseDetails.setResponseCode(Constants.STATUS_HTTP_200_OK);
            responseDetails.setResponseCode(responseCode);
            callOutgoingResponseHook(theRequest, responseDetails);
            return null;
        } else {
            Set<SummaryEnum> summaryMode = RestfulServerUtils.determineSummaryMode(theRequest);
            ResponseDetails responseDetails = new ResponseDetails();
            responseDetails.setResponseResource(response);
            responseDetails.setResponseCode(Constants.STATUS_HTTP_200_OK);
            responseDetails.setResponseCode(responseCode);

            if (!callOutgoingResponseHook(theRequest, responseDetails)) {
                return null;
            }
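A hypothetical provider sketch (not code from this diff) of what the change above enables: an @Operation method can set its own status on the servlet response, for example 202 Accepted on the async merge path, and the binding now propagates it instead of forcing 200.

    @Operation(name = ProviderConstants.OPERATION_MERGE)
    public Parameters merge(ServletRequestDetails theRequestDetails /* operation parameters elided */) {
        Parameters retVal = new Parameters();
        if (theRequestDetails.isPreferAsync()) {
            // async path: the batch job has been submitted elsewhere; report 202 Accepted
            theRequestDetails.getServletResponse().setStatus(Constants.STATUS_HTTP_202_ACCEPTED);
        }
        return retVal;
    }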
@ -132,7 +132,7 @@ public class ProviderConstants {
    public static final String OPERATION_META = "$meta";

    /**
     * Operation name for the $expunge operation
     * Operation name for the $expunge operation
     */
    public static final String OPERATION_EXPUNGE = "$expunge";

@ -253,10 +253,50 @@ public class ProviderConstants {
    /**
     * Parameter for source reference of the "$replace-references" operation
     */
    public static final String PARAM_SOURCE_REFERENCE_ID = "sourceReferenceId";
    public static final String OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID = "source-reference-id";

    /**
     * Parameter for target reference of the "$replace-references" operation
     */
    public static final String PARAM_TARGET_REFERENCE_ID = "targetReferenceId";
    public static final String OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID = "target-reference-id";

    /**
     * If the request is being performed synchronously and the number of resources that need to change
     * exceeds this amount, the operation will fail with 412 Precondition Failed.
     */
    public static final String OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT = "resource-limit";

    /**
     * $replace-references output Parameters names
     */
    public static final String OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK = "task";

    public static final String OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME = "outcome";

    /**
     * Operation name for the Resource "$merge" operation
     * Hapi-fhir use is based on https://www.hl7.org/fhir/patient-operation-merge.html
     */
    public static final String OPERATION_MERGE = "$merge";
    /**
     * Patient $merge operation parameters
     */
    public static final String OPERATION_MERGE_PARAM_SOURCE_PATIENT = "source-patient";

    public static final String OPERATION_MERGE_PARAM_SOURCE_PATIENT_IDENTIFIER = "source-patient-identifier";
    public static final String OPERATION_MERGE_PARAM_TARGET_PATIENT = "target-patient";
    public static final String OPERATION_MERGE_PARAM_TARGET_PATIENT_IDENTIFIER = "target-patient-identifier";
    public static final String OPERATION_MERGE_PARAM_RESULT_PATIENT = "result-patient";
    public static final String OPERATION_MERGE_PARAM_BATCH_SIZE = "batch-size";
    public static final String OPERATION_MERGE_PARAM_PREVIEW = "preview";
    public static final String OPERATION_MERGE_PARAM_DELETE_SOURCE = "delete-source";
    public static final String OPERATION_MERGE_OUTPUT_PARAM_INPUT = "input";
    public static final String OPERATION_MERGE_OUTPUT_PARAM_OUTCOME = OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_OUTCOME;
    public static final String OPERATION_MERGE_OUTPUT_PARAM_RESULT = "result";
    public static final String OPERATION_MERGE_OUTPUT_PARAM_TASK = OPERATION_REPLACE_REFERENCES_OUTPUT_PARAM_TASK;

    public static final String HAPI_BATCH_JOB_ID_SYSTEM = "http://hapifhir.io/batch/jobId";
    public static final String OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT_STRING = "512";
    public static final Integer OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT =
            Integer.parseInt(OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT_STRING);
}
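A sketch of how these constants might be used to invoke $replace-references from a client. The "client" variable, the example IDs, the server-level binding, and the use of string-typed parameters are all assumptions; the operation name is written as a literal because its constant is defined elsewhere.

        Parameters input = new Parameters();
        input.addParameter(ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_SOURCE_REFERENCE_ID, new StringType("Patient/src-1"));
        input.addParameter(ProviderConstants.OPERATION_REPLACE_REFERENCES_PARAM_TARGET_REFERENCE_ID, new StringType("Patient/tgt-1"));
        Parameters output = client.operation()
                .onServer()
                .named("$replace-references")
                .withParameters(input)
                .execute();
        // Small synchronous updates are expected to report the patch results in the "outcome"
        // output parameter; async runs are expected to return the tracking Task in "task".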
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<artifactId>hapi-fhir-serviceloaders</artifactId>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<artifactId>hapi-fhir-serviceloaders</artifactId>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
@ -21,7 +21,7 @@
|
|||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-caching-api</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<artifactId>hapi-fhir-serviceloaders</artifactId>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<parent>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-spring-boot</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.15-SNAPSHOT</version>
|
||||
<version>7.7.16-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -37,6 +37,6 @@ import org.springframework.context.annotation.Import;
|
|||
DeleteExpungeAppCtx.class,
|
||||
BulkExportAppCtx.class,
|
||||
TermCodeSystemJobConfig.class,
|
||||
BulkImportPullConfig.class,
|
||||
BulkImportPullConfig.class
|
||||
})
|
||||
public class Batch2JobsConfig {}
|
||||
|
|
|
@ -30,7 +30,6 @@ import ca.uhn.fhir.rest.api.server.bulk.BulkExportJobParameters;
|
|||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Scope;
|
||||
|
||||
@Configuration
|
||||
public class BulkExportAppCtx {
|
||||
|
@ -145,7 +144,6 @@ public class BulkExportAppCtx {
|
|||
}
|
||||
|
||||
@Bean
|
||||
@Scope("prototype")
|
||||
public BulkExportCreateReportStep createReportStep() {
|
||||
return new BulkExportCreateReportStep();
|
||||
}
|
||||
|
|
|
@ -47,6 +47,22 @@ public class BulkExportCreateReportStep
|
|||
|
||||
private Map<String, List<String>> myResourceToBinaryIds;
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public ChunkOutcome consume(
|
||||
ChunkExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theChunkDetails) {
|
||||
BulkExportBinaryFileId fileId = theChunkDetails.getData();
|
||||
if (myResourceToBinaryIds == null) {
|
||||
myResourceToBinaryIds = new HashMap<>();
|
||||
}
|
||||
|
||||
myResourceToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
|
||||
|
||||
myResourceToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
|
||||
|
||||
return ChunkOutcome.SUCCESS();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public RunOutcome run(
|
||||
|
@ -79,22 +95,6 @@ public class BulkExportCreateReportStep
|
|||
return RunOutcome.SUCCESS;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public ChunkOutcome consume(
|
||||
ChunkExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theChunkDetails) {
|
||||
BulkExportBinaryFileId fileId = theChunkDetails.getData();
|
||||
if (myResourceToBinaryIds == null) {
|
||||
myResourceToBinaryIds = new HashMap<>();
|
||||
}
|
||||
|
||||
myResourceToBinaryIds.putIfAbsent(fileId.getResourceType(), new ArrayList<>());
|
||||
|
||||
myResourceToBinaryIds.get(fileId.getResourceType()).add(fileId.getBinaryId());
|
||||
|
||||
return ChunkOutcome.SUCCESS();
|
||||
}
|
||||
|
||||
private static String getOriginatingRequestUrl(
|
||||
@Nonnull StepExecutionDetails<BulkExportJobParameters, BulkExportBinaryFileId> theStepExecutionDetails,
|
||||
BulkExportJobResults results) {
|
||||
|
|
|
@ -0,0 +1,100 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.merge;
|
||||
|
||||
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencePatchOutcomeJson;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceUpdateStep;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesErrorHandler;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesQueryIdsStep;
|
||||
import ca.uhn.fhir.batch2.model.JobDefinition;
|
||||
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||
import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
|
||||
import org.hl7.fhir.r4.model.Task;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
@Configuration
|
||||
public class MergeAppCtx {
|
||||
public static final String JOB_MERGE = "MERGE";
|
||||
|
||||
@Bean
|
||||
public JobDefinition<MergeJobParameters> merge(
|
||||
ReplaceReferencesQueryIdsStep<MergeJobParameters> theMergeQueryIds,
|
||||
ReplaceReferenceUpdateStep<MergeJobParameters> theMergeUpdateStep,
|
||||
MergeUpdateTaskReducerStep theMergeUpdateTaskReducerStep,
|
||||
ReplaceReferencesErrorHandler<MergeJobParameters> theMergeErrorHandler) {
|
||||
return JobDefinition.newBuilder()
|
||||
.setJobDefinitionId(JOB_MERGE)
|
||||
.setJobDescription("Merge Resources")
|
||||
.setJobDefinitionVersion(1)
|
||||
.gatedExecution()
|
||||
.setParametersType(MergeJobParameters.class)
|
||||
.addFirstStep(
|
||||
"query-ids",
|
||||
"Query IDs of resources that link to the source resource",
|
||||
FhirIdListWorkChunkJson.class,
|
||||
theMergeQueryIds)
|
||||
.addIntermediateStep(
|
||||
"replace-references",
|
||||
"Update all references from pointing to source to pointing to target",
|
||||
ReplaceReferencePatchOutcomeJson.class,
|
||||
theMergeUpdateStep)
|
||||
.addFinalReducerStep(
|
||||
"update-task",
|
||||
"Waits for replace reference work to complete and updates Task.",
|
||||
ReplaceReferenceResultsJson.class,
|
||||
theMergeUpdateTaskReducerStep)
|
||||
.errorHandler(theMergeErrorHandler)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferencesQueryIdsStep<MergeJobParameters> mergeQueryIdsStep(
|
||||
HapiTransactionService theHapiTransactionService, IBatch2DaoSvc theBatch2DaoSvc) {
|
||||
return new ReplaceReferencesQueryIdsStep<>(theHapiTransactionService, theBatch2DaoSvc);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferenceUpdateStep<MergeJobParameters> mergeUpdateStep(
|
||||
FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
|
||||
return new ReplaceReferenceUpdateStep<>(theFhirContext, theReplaceReferencesPatchBundleSvc);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MergeUpdateTaskReducerStep mergeUpdateTaskStep(
|
||||
DaoRegistry theDaoRegistry, IHapiTransactionService theHapiTransactionService) {
|
||||
return new MergeUpdateTaskReducerStep(theDaoRegistry, theHapiTransactionService);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferencesErrorHandler<MergeJobParameters> mergeErrorHandler(
|
||||
DaoRegistry theDaoRegistry, Batch2TaskHelper theBatch2TaskHelper) {
|
||||
IFhirResourceDao<Task> taskDao = theDaoRegistry.getResourceDao(Task.class);
|
||||
return new ReplaceReferencesErrorHandler<>(theBatch2TaskHelper, taskDao);
|
||||
}
|
||||
}
|
|
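For orientation, a sketch of how the async merge path is expected to hand off to the MERGE job defined above. Names such as myJobCoordinator, mergeJobParameters and theRequestDetails are assumptions about the surrounding service code, not part of this file.

        JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
        startRequest.setJobDefinitionId(MergeAppCtx.JOB_MERGE);
        startRequest.setParameters(mergeJobParameters);
        // the returned instance id can then be associated with the Task handed back to the client
        Batch2JobStartResponse startResponse = myJobCoordinator.startInstance(theRequestDetails, startRequest);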
@ -0,0 +1,47 @@
/*-
 * #%L
 * hapi-fhir-storage-batch2-jobs
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.batch2.jobs.merge;

import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencesJobParameters;
import com.fasterxml.jackson.annotation.JsonProperty;

public class MergeJobParameters extends ReplaceReferencesJobParameters {
    @JsonProperty("deleteSource")
    private boolean myDeleteSource;

    @JsonProperty("resultResource")
    private String myResultResource;

    public void setResultResource(String theResultResource) {
        myResultResource = theResultResource;
    }

    public String getResultResource() {
        return myResultResource;
    }

    public boolean getDeleteSource() {
        return myDeleteSource;
    }

    public void setDeleteSource(boolean theDeleteSource) {
        this.myDeleteSource = theDeleteSource;
    }
}
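A small sketch of how a caller might populate these parameters. The FhirContext and the result Patient are assumed; source, target, task and partition ids come from the inherited ReplaceReferencesJobParameters and are elided here.

        MergeJobParameters params = new MergeJobParameters();
        params.setDeleteSource(true);
        // the result resource travels as a JSON string so the parameters remain a plain IModelJson payload
        params.setResultResource(myFhirContext.newJsonParser().encodeResourceToString(resultPatient));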
@ -0,0 +1,187 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.merge;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
|
||||
import jakarta.annotation.Nullable;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
import org.hl7.fhir.r4.model.Identifier;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
|
||||
|
||||
/**
|
||||
* This class contains code that is used to update source and target resources after the references are replaced.
|
||||
* This is the common functionality that is used in sync case and in the async case as the reduction step.
|
||||
*/
|
||||
public class MergeResourceHelper {
|
||||
|
||||
private final IFhirResourceDao<Patient> myPatientDao;
|
||||
|
||||
public MergeResourceHelper(IFhirResourceDao<Patient> theDao) {
|
||||
myPatientDao = theDao;
|
||||
}
|
||||
|
||||
public static int setResourceLimitFromParameter(
|
||||
JpaStorageSettings theStorageSettings, IPrimitiveType<Integer> theResourceLimit) {
|
||||
int retval = defaultIfNull(
|
||||
IPrimitiveType.toValueOrNull(theResourceLimit),
|
||||
ProviderConstants.OPERATION_REPLACE_REFERENCES_RESOURCE_LIMIT_DEFAULT);
|
||||
if (retval > theStorageSettings.getMaxTransactionEntriesForWrite()) {
|
||||
retval = theStorageSettings.getMaxTransactionEntriesForWrite();
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
|
||||
public void updateMergedResourcesAfterReferencesReplaced(
|
||||
IHapiTransactionService myHapiTransactionService,
|
||||
IIdType theSourceResourceId,
|
||||
IIdType theTargetResourceId,
|
||||
@Nullable Patient theResultResource,
|
||||
boolean theDeleteSource,
|
||||
RequestDetails theRequestDetails) {
|
||||
Patient sourceResource = myPatientDao.read(theSourceResourceId, theRequestDetails);
|
||||
Patient targetResource = myPatientDao.read(theTargetResourceId, theRequestDetails);
|
||||
|
||||
updateMergedResourcesAfterReferencesReplaced(
|
||||
myHapiTransactionService,
|
||||
sourceResource,
|
||||
targetResource,
|
||||
theResultResource,
|
||||
theDeleteSource,
|
||||
theRequestDetails);
|
||||
}
|
||||
|
||||
public Patient updateMergedResourcesAfterReferencesReplaced(
|
||||
IHapiTransactionService myHapiTransactionService,
|
||||
Patient theSourceResource,
|
||||
Patient theTargetResource,
|
||||
@Nullable Patient theResultResource,
|
||||
boolean theDeleteSource,
|
||||
RequestDetails theRequestDetails) {
|
||||
|
||||
AtomicReference<Patient> targetPatientAfterUpdate = new AtomicReference<>();
|
||||
myHapiTransactionService.withRequest(theRequestDetails).execute(() -> {
|
||||
Patient patientToUpdate = prepareTargetPatientForUpdate(
|
||||
theTargetResource, theSourceResource, theResultResource, theDeleteSource);
|
||||
|
||||
targetPatientAfterUpdate.set(updateResource(patientToUpdate, theRequestDetails));
|
||||
|
||||
if (theDeleteSource) {
|
||||
deleteResource(theSourceResource, theRequestDetails);
|
||||
} else {
|
||||
prepareSourcePatientForUpdate(theSourceResource, theTargetResource);
|
||||
updateResource(theSourceResource, theRequestDetails);
|
||||
}
|
||||
});
|
||||
|
||||
return targetPatientAfterUpdate.get();
|
||||
}
|
||||
|
||||
public Patient prepareTargetPatientForUpdate(
|
||||
Patient theTargetResource,
|
||||
Patient theSourceResource,
|
||||
@Nullable Patient theResultResource,
|
||||
boolean theDeleteSource) {
|
||||
|
||||
// if the client provided a result resource as input then use it to update the target resource
|
||||
if (theResultResource != null) {
|
||||
return theResultResource;
|
||||
}
|
||||
|
||||
// client did not provide a result resource, we should update the target resource,
|
||||
// add the replaces link to the target resource, if the source resource is not to be deleted
|
||||
if (!theDeleteSource) {
|
||||
theTargetResource
|
||||
.addLink()
|
||||
.setType(Patient.LinkType.REPLACES)
|
||||
.setOther(new Reference(theSourceResource.getIdElement().toVersionless()));
|
||||
}
|
||||
|
||||
// copy all identifiers from the source to the target
|
||||
copyIdentifiersAndMarkOld(theSourceResource, theTargetResource);
|
||||
|
||||
return theTargetResource;
|
||||
}
|
||||
|
||||
private void prepareSourcePatientForUpdate(Patient theSourceResource, Patient theTargetResource) {
|
||||
theSourceResource.setActive(false);
|
||||
theSourceResource
|
||||
.addLink()
|
||||
.setType(Patient.LinkType.REPLACEDBY)
|
||||
.setOther(new Reference(theTargetResource.getIdElement().toVersionless()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies each identifier from theSourceResource to theTargetResource, after checking that theTargetResource does
|
||||
* not already contain the source identifier. Marks the copied identifiers marked as old.
|
||||
*
|
||||
* @param theSourceResource the source resource to copy identifiers from
|
||||
* @param theTargetResource the target resource to copy identifiers to
|
||||
*/
|
||||
private void copyIdentifiersAndMarkOld(Patient theSourceResource, Patient theTargetResource) {
|
||||
if (theSourceResource.hasIdentifier()) {
|
||||
List<Identifier> sourceIdentifiers = theSourceResource.getIdentifier();
|
||||
List<Identifier> targetIdentifiers = theTargetResource.getIdentifier();
|
||||
for (Identifier sourceIdentifier : sourceIdentifiers) {
|
||||
if (!containsIdentifier(targetIdentifiers, sourceIdentifier)) {
|
||||
Identifier copyOfSrcIdentifier = sourceIdentifier.copy();
|
||||
copyOfSrcIdentifier.setUse(Identifier.IdentifierUse.OLD);
|
||||
theTargetResource.addIdentifier(copyOfSrcIdentifier);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if theIdentifiers contains theIdentifier using equalsDeep
|
||||
*
|
||||
* @param theIdentifiers the list of identifiers
|
||||
* @param theIdentifier the identifier to check
|
||||
* @return true if theIdentifiers contains theIdentifier, false otherwise
|
||||
*/
|
||||
private boolean containsIdentifier(List<Identifier> theIdentifiers, Identifier theIdentifier) {
|
||||
for (Identifier identifier : theIdentifiers) {
|
||||
if (identifier.equalsDeep(theIdentifier)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private Patient updateResource(Patient theResource, RequestDetails theRequestDetails) {
|
||||
DaoMethodOutcome outcome = myPatientDao.update(theResource, theRequestDetails);
|
||||
return (Patient) outcome.getResource();
|
||||
}
|
||||
|
||||
private void deleteResource(Patient theResource, RequestDetails theRequestDetails) {
|
||||
myPatientDao.delete(theResource.getIdElement(), theRequestDetails);
|
||||
}
|
||||
}
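To make the identifier rule documented in copyIdentifiersAndMarkOld concrete, a standalone illustration; the example system and value are invented, and the expected outcome restates the behaviour described above rather than executing it.

        Patient source = new Patient();
        source.addIdentifier().setSystem("http://example.org/mrn").setValue("12345");
        Patient target = new Patient();
        // After prepareTargetPatientForUpdate(target, source, null, false) the target is expected to
        // hold a copy of the source identifier with use = Identifier.IdentifierUse.OLD, plus a
        // REPLACES link pointing at the versionless source Patient id.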
|
|
@ -0,0 +1,75 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.merge;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobDataSink;
|
||||
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
|
||||
import ca.uhn.fhir.batch2.api.RunOutcome;
|
||||
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferencePatchOutcomeJson;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceResultsJson;
|
||||
import ca.uhn.fhir.batch2.jobs.replacereferences.ReplaceReferenceUpdateTaskReducerStep;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import jakarta.annotation.Nonnull;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
|
||||
public class MergeUpdateTaskReducerStep extends ReplaceReferenceUpdateTaskReducerStep<MergeJobParameters> {
|
||||
private final IHapiTransactionService myHapiTransactionService;
|
||||
|
||||
public MergeUpdateTaskReducerStep(DaoRegistry theDaoRegistry, IHapiTransactionService theHapiTransactionService) {
|
||||
super(theDaoRegistry);
|
||||
this.myHapiTransactionService = theHapiTransactionService;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public RunOutcome run(
|
||||
@Nonnull StepExecutionDetails<MergeJobParameters, ReplaceReferencePatchOutcomeJson> theStepExecutionDetails,
|
||||
@Nonnull IJobDataSink<ReplaceReferenceResultsJson> theDataSink)
|
||||
throws JobExecutionFailedException {
|
||||
|
||||
MergeJobParameters mergeJobParameters = theStepExecutionDetails.getParameters();
|
||||
SystemRequestDetails requestDetails =
|
||||
SystemRequestDetails.forRequestPartitionId(mergeJobParameters.getPartitionId());
|
||||
|
||||
Patient resultResource = null;
|
||||
if (mergeJobParameters.getResultResource() != null) {
|
||||
resultResource =
|
||||
myFhirContext.newJsonParser().parseResource(Patient.class, mergeJobParameters.getResultResource());
|
||||
}
|
||||
|
||||
IFhirResourceDao<Patient> patientDao = myDaoRegistry.getResourceDao(Patient.class);
|
||||
|
||||
MergeResourceHelper helper = new MergeResourceHelper(patientDao);
|
||||
|
||||
helper.updateMergedResourcesAfterReferencesReplaced(
|
||||
myHapiTransactionService,
|
||||
mergeJobParameters.getSourceId().asIdDt(),
|
||||
mergeJobParameters.getTargetId().asIdDt(),
|
||||
resultResource,
|
||||
mergeJobParameters.getDeleteSource(),
|
||||
requestDetails);
|
||||
|
||||
return super.run(theStepExecutionDetails, theDataSink);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
|
||||
public class ReplaceReferencePatchOutcomeJson implements IModelJson {
|
||||
@JsonProperty("patchResponseBundle")
|
||||
String myPatchResponseBundle;
|
||||
|
||||
public ReplaceReferencePatchOutcomeJson() {}
|
||||
|
||||
public ReplaceReferencePatchOutcomeJson(FhirContext theFhirContext, Bundle theResult) {
|
||||
myPatchResponseBundle = theFhirContext.newJsonParser().encodeResourceToString(theResult);
|
||||
}
|
||||
|
||||
public String getPatchResponseBundle() {
|
||||
return myPatchResponseBundle;
|
||||
}
|
||||
|
||||
public void setPatchResponseBundle(String thePatchResponseBundle) {
|
||||
myPatchResponseBundle = thePatchResponseBundle;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
|
||||
import ca.uhn.fhir.model.api.IModelJson;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
public class ReplaceReferenceResultsJson implements IModelJson {
|
||||
@JsonProperty("taskId")
|
||||
private FhirIdJson myTaskId;
|
||||
|
||||
public ReplaceReferenceResultsJson() {}
|
||||
|
||||
public void setTaskId(FhirIdJson theTaskId) {
|
||||
myTaskId = theTaskId;
|
||||
}
|
||||
|
||||
public FhirIdJson getTaskId() {
|
||||
return myTaskId;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,75 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobDataSink;
|
||||
import ca.uhn.fhir.batch2.api.IJobStepWorker;
|
||||
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
|
||||
import ca.uhn.fhir.batch2.api.RunOutcome;
|
||||
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdJson;
|
||||
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
|
||||
import ca.uhn.fhir.replacereferences.ReplaceReferencesRequest;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import jakarta.annotation.Nonnull;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class ReplaceReferenceUpdateStep<PT extends ReplaceReferencesJobParameters>
|
||||
implements IJobStepWorker<PT, FhirIdListWorkChunkJson, ReplaceReferencePatchOutcomeJson> {
|
||||
|
||||
private final FhirContext myFhirContext;
|
||||
private final ReplaceReferencesPatchBundleSvc myReplaceReferencesPatchBundleSvc;
|
||||
|
||||
public ReplaceReferenceUpdateStep(
|
||||
FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
|
||||
myFhirContext = theFhirContext;
|
||||
myReplaceReferencesPatchBundleSvc = theReplaceReferencesPatchBundleSvc;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public RunOutcome run(
|
||||
@Nonnull StepExecutionDetails<PT, FhirIdListWorkChunkJson> theStepExecutionDetails,
|
||||
@Nonnull IJobDataSink<ReplaceReferencePatchOutcomeJson> theDataSink)
|
||||
throws JobExecutionFailedException {
|
||||
|
||||
ReplaceReferencesJobParameters params = theStepExecutionDetails.getParameters();
|
||||
ReplaceReferencesRequest replaceReferencesRequest = params.asReplaceReferencesRequest();
|
||||
List<IdDt> fhirIds = theStepExecutionDetails.getData().getFhirIds().stream()
|
||||
.map(FhirIdJson::asIdDt)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
SystemRequestDetails requestDetails = SystemRequestDetails.forRequestPartitionId(params.getPartitionId());
|
||||
|
||||
Bundle result = myReplaceReferencesPatchBundleSvc.patchReferencingResources(
|
||||
replaceReferencesRequest, fhirIds, requestDetails);
|
||||
|
||||
ReplaceReferencePatchOutcomeJson data = new ReplaceReferencePatchOutcomeJson(myFhirContext, result);
|
||||
theDataSink.accept(data);
|
||||
|
||||
return new RunOutcome(result.getEntry().size());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,110 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.ChunkExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.api.IJobDataSink;
|
||||
import ca.uhn.fhir.batch2.api.IReductionStepWorker;
|
||||
import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
|
||||
import ca.uhn.fhir.batch2.api.RunOutcome;
|
||||
import ca.uhn.fhir.batch2.api.StepExecutionDetails;
|
||||
import ca.uhn.fhir.batch2.model.ChunkOutcome;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import jakarta.annotation.Nonnull;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.Reference;
|
||||
import org.hl7.fhir.r4.model.Task;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class ReplaceReferenceUpdateTaskReducerStep<PT extends ReplaceReferencesJobParameters>
|
||||
implements IReductionStepWorker<PT, ReplaceReferencePatchOutcomeJson, ReplaceReferenceResultsJson> {
|
||||
public static final String RESOURCE_TYPES_SYSTEM = "http://hl7.org/fhir/ValueSet/resource-types";
|
||||
|
||||
protected final FhirContext myFhirContext;
|
||||
protected final DaoRegistry myDaoRegistry;
|
||||
private final IFhirResourceDao<Task> myTaskDao;
|
||||
|
||||
private List<Bundle> myPatchOutputBundles = new ArrayList<>();
|
||||
|
||||
public ReplaceReferenceUpdateTaskReducerStep(DaoRegistry theDaoRegistry) {
|
||||
myDaoRegistry = theDaoRegistry;
|
||||
myTaskDao = myDaoRegistry.getResourceDao(Task.class);
|
||||
myFhirContext = theDaoRegistry.getFhirContext();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public ChunkOutcome consume(ChunkExecutionDetails<PT, ReplaceReferencePatchOutcomeJson> theChunkDetails) {
|
||||
ReplaceReferencePatchOutcomeJson result = theChunkDetails.getData();
|
||||
Bundle patchOutputBundle =
|
||||
myFhirContext.newJsonParser().parseResource(Bundle.class, result.getPatchResponseBundle());
|
||||
myPatchOutputBundles.add(patchOutputBundle);
|
||||
return ChunkOutcome.SUCCESS();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public RunOutcome run(
|
||||
@Nonnull StepExecutionDetails<PT, ReplaceReferencePatchOutcomeJson> theStepExecutionDetails,
|
||||
@Nonnull IJobDataSink<ReplaceReferenceResultsJson> theDataSink)
|
||||
throws JobExecutionFailedException {
|
||||
|
||||
try {
|
||||
ReplaceReferencesJobParameters params = theStepExecutionDetails.getParameters();
|
||||
SystemRequestDetails requestDetails = SystemRequestDetails.forRequestPartitionId(params.getPartitionId());
|
||||
Task task = myTaskDao.read(params.getTaskId().asIdDt(), requestDetails);
|
||||
|
||||
task.setStatus(Task.TaskStatus.COMPLETED);
|
||||
// TODO KHS this Task will probably be too large for large jobs. Revisit this model once we support
|
||||
// Provenance
|
||||
// resources.
|
||||
myPatchOutputBundles.forEach(outputBundle -> {
|
||||
Task.TaskOutputComponent output = task.addOutput();
|
||||
Coding coding = output.getType().getCodingFirstRep();
|
||||
coding.setSystem(RESOURCE_TYPES_SYSTEM);
|
||||
coding.setCode("Bundle");
|
||||
Reference outputBundleReference =
|
||||
new Reference("#" + outputBundle.getIdElement().getIdPart());
|
||||
output.setValue(outputBundleReference);
|
||||
task.addContained(outputBundle);
|
||||
});
|
||||
|
||||
myTaskDao.update(task, requestDetails);
|
||||
|
||||
ReplaceReferenceResultsJson result = new ReplaceReferenceResultsJson();
|
||||
result.setTaskId(params.getTaskId());
|
||||
theDataSink.accept(result);
|
||||
|
||||
return new RunOutcome(myPatchOutputBundles.size());
|
||||
} finally {
|
||||
// Reusing the same reducer for all jobs feels confusing and dangerous to me. We need to fix this.
|
||||
// See https://github.com/hapifhir/hapi-fhir/pull/6551
|
||||
// TODO KHS add new methods to the api called init() and cleanup() that are called by the api so we can move
|
||||
// this finally block out
|
||||
myPatchOutputBundles.clear();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,95 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.batch2.jobs.chunk.FhirIdListWorkChunkJson;
|
||||
import ca.uhn.fhir.batch2.model.JobDefinition;
|
||||
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.replacereferences.ReplaceReferencesPatchBundleSvc;
|
||||
import org.hl7.fhir.r4.model.Task;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
@Configuration
|
||||
public class ReplaceReferencesAppCtx {
|
||||
public static final String JOB_REPLACE_REFERENCES = "REPLACE_REFERENCES";
|
||||
|
||||
@Bean
|
||||
public JobDefinition<ReplaceReferencesJobParameters> replaceReferencesJobDefinition(
|
||||
ReplaceReferencesQueryIdsStep<ReplaceReferencesJobParameters> theReplaceReferencesQueryIds,
|
||||
ReplaceReferenceUpdateStep<ReplaceReferencesJobParameters> theReplaceReferenceUpdateStep,
|
||||
ReplaceReferenceUpdateTaskReducerStep<ReplaceReferencesJobParameters>
|
||||
theReplaceReferenceUpdateTaskReducerStep,
|
||||
ReplaceReferencesErrorHandler<ReplaceReferencesJobParameters> theReplaceReferencesErrorHandler) {
|
||||
return JobDefinition.newBuilder()
|
||||
.setJobDefinitionId(JOB_REPLACE_REFERENCES)
|
||||
.setJobDescription("Replace References")
|
||||
.setJobDefinitionVersion(1)
|
||||
.gatedExecution()
|
||||
.setParametersType(ReplaceReferencesJobParameters.class)
|
||||
.addFirstStep(
|
||||
"query-ids",
|
||||
"Query IDs of resources that link to the source resource",
|
||||
FhirIdListWorkChunkJson.class,
|
||||
theReplaceReferencesQueryIds)
|
||||
.addIntermediateStep(
|
||||
"replace-references",
|
||||
"Update all references from pointing to source to pointing to target",
|
||||
ReplaceReferencePatchOutcomeJson.class,
|
||||
theReplaceReferenceUpdateStep)
|
||||
.addFinalReducerStep(
|
||||
"update-task",
|
||||
"Waits for replace reference work to complete and updates Task.",
|
||||
ReplaceReferenceResultsJson.class,
|
||||
theReplaceReferenceUpdateTaskReducerStep)
|
||||
.errorHandler(theReplaceReferencesErrorHandler)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferencesQueryIdsStep<ReplaceReferencesJobParameters> replaceReferencesQueryIdsStep(
|
||||
HapiTransactionService theHapiTransactionService, IBatch2DaoSvc theBatch2DaoSvc) {
|
||||
return new ReplaceReferencesQueryIdsStep<>(theHapiTransactionService, theBatch2DaoSvc);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferenceUpdateStep<ReplaceReferencesJobParameters> replaceReferenceUpdateStep(
|
||||
FhirContext theFhirContext, ReplaceReferencesPatchBundleSvc theReplaceReferencesPatchBundleSvc) {
|
||||
return new ReplaceReferenceUpdateStep<>(theFhirContext, theReplaceReferencesPatchBundleSvc);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferenceUpdateTaskReducerStep<ReplaceReferencesJobParameters> replaceReferenceUpdateTaskStep(
|
||||
DaoRegistry theDaoRegistry) {
|
||||
return new ReplaceReferenceUpdateTaskReducerStep<>(theDaoRegistry);
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ReplaceReferencesErrorHandler<ReplaceReferencesJobParameters> replaceReferencesErrorHandler(
|
||||
DaoRegistry theDaoRegistry, Batch2TaskHelper theBatch2TaskHelper) {
|
||||
IFhirResourceDao<Task> taskDao = theDaoRegistry.getResourceDao(Task.class);
|
||||
return new ReplaceReferencesErrorHandler<>(theBatch2TaskHelper, taskDao);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,54 @@
|
|||
/*-
|
||||
* #%L
|
||||
* hapi-fhir-storage-batch2-jobs
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.batch2.jobs.replacereferences;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobCompletionHandler;
|
||||
import ca.uhn.fhir.batch2.api.JobCompletionDetails;
|
||||
import ca.uhn.fhir.batch2.util.Batch2TaskHelper;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import org.hl7.fhir.r4.model.Task;
|
||||
|
||||
/**
|
||||
* This class is the error handler for ReplaceReferences and Merge jobs.
|
||||
* It updates the status of the associated task.
|
||||
*/
|
||||
public class ReplaceReferencesErrorHandler<PT extends ReplaceReferencesJobParameters>
|
||||
implements IJobCompletionHandler<PT> {
|
||||
|
||||
private final Batch2TaskHelper myBatch2TaskHelper;
|
||||
private final IFhirResourceDao<Task> myTaskDao;
|
||||
|
||||
public ReplaceReferencesErrorHandler(Batch2TaskHelper theBatch2TaskHelper, IFhirResourceDao<Task> theTaskDao) {
|
||||
myBatch2TaskHelper = theBatch2TaskHelper;
|
||||
myTaskDao = theTaskDao;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void jobComplete(JobCompletionDetails<PT> theDetails) {
|
||||
|
||||
PT jobParameters = theDetails.getParameters();
|
||||
|
||||
SystemRequestDetails requestDetails =
|
||||
SystemRequestDetails.forRequestPartitionId(jobParameters.getPartitionId());
|
||||
|
||||
myBatch2TaskHelper.updateTaskStatusOnJobCompletion(myTaskDao, requestDetails, theDetails);
|
||||
}
|
||||
}
|