This commit is contained in:
justin.mckelvy 2023-05-15 16:25:16 -06:00
commit c2a8fb1946
223 changed files with 3374 additions and 1263 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -23,8 +23,6 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import javax.annotation.Nonnull;
import java.util.Collection;
@ -136,8 +134,14 @@ public class HookParams {
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE)
.append("params", myParams)
.toString();
StringBuilder b = new StringBuilder();
myParams.forEach((key, value) -> {
b.append(" ")
.append(key.getSimpleName())
.append(": ")
.append(value)
.append("\n");
});
return b.toString();
}
}

View File

@ -123,7 +123,7 @@ public abstract class BasePrimitive<T> extends BaseIdentifiableElement implement
myStringValue = null;
} else {
// NB this might be null
myStringValue = encode(myCoercedValue);
myStringValue = encode(myCoercedValue);
}
}

View File

@ -30,6 +30,8 @@ import ca.uhn.fhir.util.DateUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -355,17 +357,21 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
if (myLowerBound == null || myLowerBound.getValue() == null) {
return null;
}
Date retVal = myLowerBound.getValue();
return getLowerBoundAsInstant(myLowerBound);
}
if (myLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
@Nonnull
private static Date getLowerBoundAsInstant(@Nonnull DateParam theLowerBound) {
Date retVal = theLowerBound.getValue();
if (theLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
retVal = DateUtils.getLowestInstantFromDate(retVal);
}
if (myLowerBound.getPrefix() != null) {
switch (myLowerBound.getPrefix()) {
if (theLowerBound.getPrefix() != null) {
switch (theLowerBound.getPrefix()) {
case GREATERTHAN:
case STARTS_AFTER:
retVal = myLowerBound.getPrecision().add(retVal, 1);
retVal = theLowerBound.getPrecision().add(retVal, 1);
break;
case EQUAL:
case NOT_EQUAL:
@ -375,7 +381,7 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
case APPROXIMATE:
case LESSTHAN_OR_EQUALS:
case ENDS_BEFORE:
throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + myLowerBound.getPrefix());
throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix());
}
}
return retVal;
@ -417,14 +423,19 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
return null;
}
Date retVal = myUpperBound.getValue();
return getUpperBoundAsInstant(myUpperBound);
}
if (myUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
@Nonnull
private static Date getUpperBoundAsInstant(@Nonnull DateParam theUpperBound) {
Date retVal = theUpperBound.getValue();
if (theUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
retVal = DateUtils.getHighestInstantFromDate(retVal);
}
if (myUpperBound.getPrefix() != null) {
switch (myUpperBound.getPrefix()) {
if (theUpperBound.getPrefix() != null) {
switch (theUpperBound.getPrefix()) {
case LESSTHAN:
case ENDS_BEFORE:
retVal = new Date(retVal.getTime() - 1L);
@ -432,14 +443,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
case EQUAL:
case NOT_EQUAL:
case LESSTHAN_OR_EQUALS:
retVal = myUpperBound.getPrecision().add(retVal, 1);
retVal = theUpperBound.getPrecision().add(retVal, 1);
retVal = new Date(retVal.getTime() - 1L);
break;
case GREATERTHAN_OR_EQUALS:
case GREATERTHAN:
case APPROXIMATE:
case STARTS_AFTER:
throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + myUpperBound.getPrefix());
throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + theUpperBound.getPrefix());
}
}
return retVal;
@ -626,12 +637,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
* are the same value. As such, even though the prefixes for the lower and
* upper bounds default to <code>ge</code> and <code>le</code> respectively,
* the resulting prefix is effectively <code>eq</code> where only a single
* date is provided - as required by the FHIR specificiation (i.e. "If no
* date is provided - as required by the FHIR specification (i.e. "If no
* prefix is present, the prefix <code>eq</code> is assumed").
*/
private void validateAndSet(DateParam lowerBound, DateParam upperBound) {
if (hasBound(lowerBound) && hasBound(upperBound)) {
if (lowerBound.getValue().getTime() > upperBound.getValue().getTime()) {
Date lowerBoundAsInstant = getLowerBoundAsInstant(lowerBound);
Date upperBoundAsInstant = getUpperBoundAsInstant(upperBound);
if (lowerBoundAsInstant.after(upperBoundAsInstant)) {
throw new DataFormatException(Msg.code(1932) + format(
"Lower bound of %s is after upper bound of %s",
lowerBound.getValueAsString(), upperBound.getValueAsString()));

View File

@ -249,6 +249,22 @@ public class BundleBuilder {
return new CreateBuilder(request);
}
/**
* Adds an entry containing a delete (DELETE) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.
* <p>
* Note that no resource body is included in the entry; the entry is matched solely by the conditional URL. (NOTE(review): this overload takes only a condition String — the earlier "resource ID and type" wording appears copy-pasted from the resource-based overload.)
*
* @param theCondition The conditional URL, e.g. "Patient?identifier=foo|bar"
* @since 6.8.0
*/
public DeleteBuilder addTransactionDeleteConditionalEntry(String theCondition) {
Validate.notBlank(theCondition, "theCondition must not be blank");
setBundleField("type", "transaction");
return addDeleteEntry(theCondition);
}
/**
* Adds an entry containing a delete (DELETE) request.
* Also sets the Bundle.type value to "transaction" if it is not already set.

View File

@ -29,9 +29,10 @@ public class DateRangeUtil {
/**
* Narrow the DateRange to be within theStartInclusive, and theEndExclusive, if provided.
*
* @param theDateRangeParam the initial range, null for unconstrained
* @param theStartInclusive a lower bound to apply, or null for unchanged.
* @param theEndExclusive an upper bound to apply, or null for unchanged.
* @param theEndExclusive an upper bound to apply, or null for unchanged.
* @return a DateRange within the original range, and between theStartInclusive and theEnd
*/
@Nonnull
@ -39,16 +40,23 @@ public class DateRangeUtil {
if (theStartInclusive == null && theEndExclusive == null) {
return theDateRangeParam;
}
DateRangeParam result = theDateRangeParam==null?new DateRangeParam():new DateRangeParam(theDateRangeParam);
DateRangeParam result = theDateRangeParam == null ? new DateRangeParam() : new DateRangeParam(theDateRangeParam);
if (theStartInclusive != null) {
Date startInclusive = theStartInclusive;
if (startInclusive != null) {
Date inputStart = result.getLowerBoundAsInstant();
if (theDateRangeParam == null || inputStart == null || inputStart.before(theStartInclusive)) {
result.setLowerBoundInclusive(theStartInclusive);
Date upperBound = result.getUpperBoundAsInstant();
if (upperBound != null && upperBound.before(startInclusive)) {
startInclusive = upperBound;
}
if (theDateRangeParam == null || inputStart == null || inputStart.before(startInclusive)) {
result.setLowerBoundInclusive(startInclusive);
}
}
if (theEndExclusive != null) {
Date inputEnd = result.getUpperBound() == null? null : result.getUpperBound().getValue();
Date inputEnd = result.getUpperBound() == null ? null : result.getUpperBound().getValue();
if (theDateRangeParam == null || inputEnd == null || inputEnd.after(theEndExclusive)) {
result.setUpperBoundExclusive(theEndExclusive);
}

View File

@ -28,6 +28,8 @@ public class Logs {
private static final Logger ourSubscriptionTroubleshootingLog = LoggerFactory.getLogger("ca.cdr.log.subscription_troubleshooting");
private static final Logger ourSubscriptionTopicLog = LoggerFactory.getLogger("ca.uhn.fhir.log.subscription_topic_troubleshooting");
public static Logger getBatchTroubleshootingLog() {
return ourBatchTroubleshootingLog;
}
@ -39,4 +41,8 @@ public class Logs {
public static Logger getSubscriptionTroubleshootingLog() {
return ourSubscriptionTroubleshootingLog;
}
public static Logger getSubscriptionTopicLog() {
return ourSubscriptionTopicLog;
}
}

View File

@ -129,6 +129,15 @@ public class ParametersUtil {
return getParameterPartValue(theCtx, theParameter, theParameterName).map(t -> (IPrimitiveType<?>) t).map(t -> t.getValueAsString()).orElse(null);
}
public static Optional<Integer> getParameterPartValueAsInteger(FhirContext theCtx, IBase theParameter, String theParameterName) {
return getParameterPartValue(theCtx, theParameter, theParameterName)
.filter(t -> IPrimitiveType.class.isAssignableFrom(t.getClass()))
.map(t -> (IPrimitiveType<?>) t)
.map(IPrimitiveType::getValue)
.filter(t -> Integer.class.isAssignableFrom(t.getClass()))
.map(t -> (Integer) t);
}
private static <T> List<T> extractNamedParameters(FhirContext theCtx, IBaseParameters theParameters, String theParameterName, Function<IPrimitiveType<?>, T> theMapper) {
List<T> retVal = new ArrayList<>();

View File

@ -441,7 +441,10 @@ public final class TerserUtil {
private static void replaceField(FhirTerser theTerser, IBaseResource theFrom, IBaseResource theTo, BaseRuntimeChildDefinition childDefinition) {
List<IBase> fromValues = childDefinition.getAccessor().getValues(theFrom);
List<IBase> toValues = childDefinition.getAccessor().getValues(theTo);
if (fromValues != toValues) {
if (fromValues.isEmpty() && !toValues.isEmpty()) {
childDefinition.getMutator().setValue(theTo, null);
} else if (fromValues != toValues) {
clear(toValues);
mergeFields(theTerser, theTo, childDefinition, fromValues, toValues);
@ -529,7 +532,6 @@ public final class TerserUtil {
* Creates a new element taking into consideration elements with choice that are not directly retrievable by element
* name
*
*
* @param theFhirTerser
* @param theChildDefinition Child to create a new instance for
* @param theFromFieldValue The base parent field

View File

@ -19,15 +19,16 @@
*/
package org.hl7.fhir.instance.model.api;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.Include;
/**
* For now, this is a simple marker interface indicating that a class is a resource type.
* There are two concrete types of implementations of this interface. The first are
@ -58,4 +59,10 @@ public interface IBaseResource extends IBase, IElement {
FhirVersionEnum getStructureFhirVersionEnum();
/**
* @return <code>true</code> if this resource has been deleted
*/
default boolean isDeleted() {
return ResourceMetadataKeyEnum.DELETED_AT.get(this) != null;
}
}

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@ -59,6 +60,11 @@ class DateRangeUtilTest {
new DateParam(theResultStartPrefix, theResultStart), new DateParam(theResultEndPrefix, theResultEnd));
}
static NarrowCase from(String theMessage, DateRangeParam theRange, Date theNarrowStart, Date theNarrowEnd,
DateParam theResultStart, DateParam theResultEnd) {
return new NarrowCase(theMessage, theRange, theNarrowStart, theNarrowEnd, theResultStart, theResultEnd);
}
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE)
@ -89,8 +95,23 @@ class DateRangeUtilTest {
// half-open cases
NarrowCase.from("end inside open end", new DateRangeParam(dateTwo, null), null, dateFour, dateTwo, dateFour),
NarrowCase.from("start inside open start", new DateRangeParam(null, dateFour), dateTwo, null, GREATERTHAN_OR_EQUALS, dateTwo, LESSTHAN_OR_EQUALS, dateFour),
NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour)
NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour),
NarrowCase.from("lt date level precision date, narrow from is inside date",
new DateRangeParam(new DateParam(LESSTHAN, "2023-05-06")),
Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
Date.from(Instant.parse("2023-05-10T00:00:00.000+00:00")),
new DateParam(GREATERTHAN_OR_EQUALS, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00"))),
new DateParam(LESSTHAN, "2023-05-06")
),
NarrowCase.from("gt date level precision date, narrow to is inside date",
new DateRangeParam(new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06")),
Date.from(Instant.parse("2023-05-01T00:00:00.000+00:00")),
Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06"),
new DateParam(LESSTHAN, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")))
)
);
}

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -2,8 +2,8 @@ package ca.uhn.fhir.cli;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
import ca.uhn.fhir.test.utilities.RestServerR4Helper;
import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.ParseException;
@ -29,7 +29,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
class ExampleDataUploaderTest {
@RegisterExtension
public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper();
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newWithTransactionLatch();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();
@ -46,7 +46,8 @@ class ExampleDataUploaderTest {
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testHeaderPassthrough(boolean theIncludeTls) throws ParseException {
public void testHeaderPassthrough(boolean theIncludeTls) throws ParseException, InterruptedException {
// setup
String headerKey = "test-header-key";
String headerValue = "test header value";
@ -60,8 +61,11 @@ class ExampleDataUploaderTest {
);
final CommandLine commandLine = new DefaultParser().parse(testedCommand.getOptions(), args, true);
testedCommand.run(commandLine);
// execute
myRestServerR4Helper.executeWithLatch(() -> runCommand(commandLine));
// validate
assertNotNull(myCapturingInterceptor.getLastRequest());
Map<String, List<String>> allHeaders = myCapturingInterceptor.getLastRequest().getAllHeaders();
assertFalse(allHeaders.isEmpty());
@ -78,6 +82,14 @@ class ExampleDataUploaderTest {
assertEquals("EX3152", resource.getIdElement().getIdPart());
}
private void runCommand(CommandLine commandLine) {
try {
testedCommand.run(commandLine);
} catch (ParseException e) {
throw new RuntimeException(e);
}
}
private static class RequestCapturingExampleDataUploader extends ExampleDataUploader {
private final CapturingInterceptor myCapturingInterceptor;

View File

@ -43,7 +43,7 @@ public class ExportConceptMapToCsvCommandDstu3Test {
}
@RegisterExtension
public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -40,7 +40,7 @@ public class ExportConceptMapToCsvCommandR4Test {
}
@RegisterExtension
public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -45,7 +45,7 @@ public class ImportCsvToConceptMapCommandDstu3Test {
}
@RegisterExtension
public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -50,7 +50,7 @@ public class ImportCsvToConceptMapCommandR4Test {
@RegisterExtension
public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -39,7 +39,7 @@ class ReindexTerminologyCommandTest {
private BaseJpaSystemProvider<?, ?> myProvider = spy(new BaseJpaSystemProvider<>() {});
@RegisterExtension
public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -104,9 +104,9 @@ public class UploadTerminologyCommandTest {
}
@RegisterExtension
public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
@RegisterExtension
public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
@RegisterExtension
public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR - Docs
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.hapi.fhir.docs;
import ca.uhn.fhir.context.FhirContext;

View File

@ -1,4 +1,5 @@
---
type: add
issue: 4697
title: "Add providers for the operations available in the Clinical Reasoning package."
title: "Added DSTU3 and R4 support for the FHIR Clinical Reasoning module operations ActivityDefinition/$apply and PlanDefinition/$apply.
$apply allows for general workflow processing and is used in clinical decision support, prior authorization, quality reporting, and disease surveillance use cases."

View File

@ -0,0 +1,6 @@
---
type: add
issue: 4697
title: "Added DSTU3 and R4 support for the DaVinci Documentation Templates and Rules (DTR) Questionnaire/$questionnaire-package operation.
This operation allows a Questionnaire to be packaged as a Bundle with all the supporting resources that may be required for its use such as ValueSets and Libraries.
This operation is used in context of prior authorization."

View File

@ -0,0 +1,4 @@
---
type: add
issue: 4697
title: "Added R4 support for Questionnaire/$prepopulate and PlanDefinition/$package operations. These operations are intended to support extended DaVinci DTR and SDC use cases."

View File

@ -0,0 +1,5 @@
---
type: add
issue: 4697
title: "Added DSTU3 and R4 support for the DaVinci Structured Data Capture (SDC) operations Questionnaire/$populate operation and QuestionnaireResponse/$extract.
These operations are used in data capture and exchange use cases, and are used by downstream specifications such as DaVinci Documentation Templates and Rules (DTR) for prior authorization."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 4814
title: "A recent regression prevented the SQL Migrator from running on Oracle. This has been
corrected."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 4812
title: "The tag added to golden resources previously did not have a version; a version is now included."

View File

@ -0,0 +1,8 @@
---
type: change
issue: 4817
title: "Introduce IBaseResource.isDeleted() method and convert code to use it.
Add subscription_topic_troubleshooting log.
No longer rely on ResourceGoneException to detect deleted subscription. Instead use the new isDeleted() method.
Demote unexpected exceptions in HapiTransactionService from error to debug since these exceptions are expected
e.g. when checking if a resource has been deleted by catching a ResourceGoneException"

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 4831
title: "When performing a FHIR transaction containing both a conditional delete as well as a
conditional create/update for the same resource, the resource was left in an inconsistent
state. This has been corrected. Thanks to Laxman Singh for raising this issue."

View File

@ -0,0 +1,5 @@
---
type: perf
issue: 4831
title: "Conditional deletes that delete multiple resources at once have been optimized to perform
fewer SQL select statements, which should improve performance on large deletes."

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 4838
title: "Two failures in the $delete-expunge operation were fixed:
<ul>
<li>Jobs could fail if hibernate search was loaded but not enabled.</li>
<li>Jobs could fail if the criteria included a <code>_lastUpdated=lt[date]</code> clause</li>
</ul>"

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 4838
title: "The BALP AsyncMemoryQueueBackedFhirClientBalpSink incorrectly used a non-blocking method
to add events to the blocking queue, resulting in race conditions on a heavily loaded
server."

View File

@ -0,0 +1,7 @@
---
type: add
issue: 4838
title: "When performing a resource reindex on a deleted resource, any search index rows will now
be deleted. Deleting a resource should generally not leave any such rows behind, but they can
be left if the resource is manually deleted using SQL directly against the database and in this
case the reindex job will now clean up these unwanted rows."

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 4838
title: "When reindexing resources, deleted resources could incorrectly fail validation rules and
cause the reindex job to not complete correctly. This has been corrected."

View File

@ -0,0 +1,6 @@
---
type: add
issue: 4838
title: "When invoking the instance level `$reindex` and `$reindex-dryrun` operations, the resulting
status message and any warnings are now included in the response Parameters object as well as in
the generated response HTML narrative."

View File

@ -0,0 +1,4 @@
---
type: fix
issue: 4842
title: "There was a bug with the TerserUtil, where it would not overwrite non-empty values with empty values from a source resource. This has been corrected. Thanks to @nigtrifork for the fix!"

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 4881
title: "When _Index Contained References_ is enabled in the JPA server, Bundle resources could not be
stored or indexed due to an incompatibility with the default Bundle search parameters. This has been
corrected."

View File

@ -0,0 +1,8 @@
---
- item:
type: "add"
title: "The version of a few dependencies have been bumped to the latest versions
(dependent HAPI modules listed in brackets):
<ul>
<li>Hibernate ORM (JPA): 5.6.12.Final -&gt; 5.6.15.Final</li>
</ul>"

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -166,7 +166,6 @@ import java.util.stream.Collectors;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang3.BooleanUtils.isFalse;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.left;
@ -537,20 +536,16 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
String newVersion;
long newVersionLong;
if (theResourceId == null || theResourceId.getVersionIdPart() == null) {
newVersion = "1";
newVersionLong = 1;
theSavedEntity.initializeVersion();
} else {
newVersionLong = theResourceId.getVersionIdPartAsLong() + 1;
newVersion = Long.toString(newVersionLong);
theSavedEntity.markVersionUpdatedInCurrentTransaction();
}
assert theResourceId != null;
String newVersion = Long.toString(theSavedEntity.getVersion());
IIdType newId = theResourceId.withVersion(newVersion);
theResource.getIdElement().setValue(newId.getValue());
theSavedEntity.setVersion(newVersionLong);
}
public boolean isLogicalReference(IIdType theId) {
@ -966,7 +961,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
* This should be the very first thing..
*/
if (theResource != null) {
if (thePerformIndexing) {
if (thePerformIndexing && theDeletedTimestampOrNull == null) {
if (!ourValidationDisabledForUnitTest) {
validateResourceForStorage((T) theResource, entity);
}
@ -1062,7 +1057,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), newParams, theRequest);
}
entity.setUpdated(theTransactionDetails.getTransactionDate());
if (CURRENTLY_REINDEXING.get(theResource) != Boolean.TRUE) {
entity.setUpdated(theTransactionDetails.getTransactionDate());
}
newParams.populateResourceTableSearchParamsPresentFlags(entity);
entity.setIndexStatus(INDEX_STATUS_INDEXED);
}
@ -1091,9 +1088,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
return entity;
}
if (theUpdateVersion) {
long newVersion = entity.getVersion() + 1;
entity.setVersion(newVersion);
if (entity.getId() != null && theUpdateVersion) {
entity.markVersionUpdatedInCurrentTransaction();
}
/*
@ -1157,6 +1153,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (thePerformIndexing) {
if (newParams == null) {
myExpungeService.deleteAllSearchParams(JpaPid.fromId(entity.getId()));
entity.clearAllParamsPopulated();
} else {
// Synchronize search param indexes
@ -1296,6 +1293,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
boolean versionedTags = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED;
final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
historyEntry.setEncoding(theChanged.getEncoding());
historyEntry.setResource(theChanged.getResourceBinary());
@ -1412,11 +1410,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myJpaStorageResourceParser.populateResourceMetadata(entity, false, tagList, version, theResource);
boolean wasDeleted = false;
// NB If this if-else ever gets collapsed, make sure to account for possible null (will happen in mass-ingestion mode)
if (theOldResource instanceof IResource) {
wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) theOldResource) != null;
} else if (theOldResource instanceof IAnyResource) {
wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) theOldResource) != null;
if (theOldResource != null) {
wasDeleted = theOldResource.isDeleted();
}
DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted);

View File

@ -35,6 +35,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
@ -199,9 +200,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
private TransactionTemplate myTxTemplate;
@Autowired
private UrlPartitioner myUrlPartitioner;
@Autowired
private ResourceSearchUrlSvc myResourceSearchUrlSvc;
@Autowired
private IFhirSystemDao<?, ?> mySystemDao;
public static <T extends IBaseResource> T invokeStoragePreShowResources(IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, T retVal) {
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, theInterceptorBroadcaster, theRequest)) {
@ -263,12 +265,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
*/
@Override
public DaoMethodOutcome create(final T theResource) {
return create(theResource, null, true, new TransactionDetails(), null);
return create(theResource, null, true, null, new TransactionDetails());
}
@Override
public DaoMethodOutcome create(final T theResource, RequestDetails theRequestDetails) {
return create(theResource, null, true, new TransactionDetails(), theRequestDetails);
return create(theResource, null, true, theRequestDetails, new TransactionDetails());
}
/**
@ -281,11 +283,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Override
public DaoMethodOutcome create(final T theResource, String theIfNoneExist, RequestDetails theRequestDetails) {
return create(theResource, theIfNoneExist, true, new TransactionDetails(), theRequestDetails);
return create(theResource, theIfNoneExist, true, theRequestDetails, new TransactionDetails());
}
@Override
public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
return myTransactionService
.withRequest(theRequestDetails)
@ -340,7 +342,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
entity.setResourceType(toResourceName(theResource));
entity.setPartitionId(PartitionablePartitionId.toStoragePartition(theRequestPartitionId, myPartitionSettings));
entity.setCreatedByMatchUrl(theMatchUrl);
entity.setVersion(1);
entity.initializeVersion();
if (isNotBlank(theMatchUrl) && theProcessMatchUrl) {
Set<JpaPid> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest);
@ -348,19 +350,51 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size());
throw new PreconditionFailedException(Msg.code(958) + msg);
} else if (match.size() == 1) {
JpaPid pid = match.iterator().next();
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> {
return myTxTemplate.execute(tx -> {
/*
* Ok, so we've found a single PID that matches the conditional URL.
* That's good, there are two possibilities below.
*/
JpaPid pid = match.iterator().next();
if (theTransactionDetails.getDeletedResourceIds().contains(pid)) {
/*
* If the resource matching the given match URL has already been
* deleted within this transaction. This is a really rare case, since
* it means the client has performed a FHIR transaction with both
* a delete and a create on the same conditional URL. This is rare
* but allowed, and means that it's now ok to create a new resource
* matching the conditional URL since we'll be deleting any existing
* index rows on the existing resource as a part of this transaction.
* We can also un-resolve the previous match URL in the TransactionDetails
* since we'll resolve it to the new resource ID below
*/
myMatchResourceUrlService.unresolveMatchUrl(theTransactionDetails, getResourceName(), theMatchUrl);
} else {
/*
* This is the normal path where the conditional URL matched exactly
* one resource, so we won't be creating anything but instead
* just returning the existing ID. We now have a PID for the matching
* resource, but we haven't loaded anything else (e.g. the forced ID
* or the resource body aren't yet loaded from the DB). We're going to
* return a LazyDaoOutcome with two lazy loaded providers for loading the
* entity and the forced ID since we can avoid these extra SQL loads
* unless we know we're actually going to use them. For example, if
* the client has specified "Prefer: return=minimal" then we won't be
* needing the load the body.
*/
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> myTxTemplate.execute(tx -> {
ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
IBaseResource resource = myJpaStorageResourceParser.toResource(foundEntity, false);
theResource.setId(resource.getIdElement().getValue());
return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
});
};
Supplier<IIdType> idSupplier = () -> {
return myTxTemplate.execute(tx -> {
Supplier<IIdType> idSupplier = () -> myTxTemplate.execute(tx -> {
IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid);
if (!retVal.hasVersionIdPart()) {
Long version = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId());
@ -376,13 +410,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
return retVal;
});
};
DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", w.getMillisAndRestart(), UrlUtil.sanitizeUrlPart(theMatchUrl));
outcome.setOperationOutcome(createInfoOperationOutcome(msg, responseCode));
return outcome;
DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "successfulCreateConditionalWithMatch", w.getMillisAndRestart(), UrlUtil.sanitizeUrlPart(theMatchUrl));
outcome.setOperationOutcome(createInfoOperationOutcome(msg, responseCode));
return outcome;
}
}
}
@ -617,12 +651,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
throw new ResourceVersionConflictException(Msg.code(961) + "Trying to delete " + theId + " but this is not the current version");
}
JpaPid persistentId = JpaPid.fromId(entity.getResourceId());
theTransactionDetails.addDeletedResourceId(persistentId);
// Don't delete again if it's already deleted
if (isDeleted(entity)) {
DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED);
// used to exist, so we'll set the persistent id
outcome.setPersistentId(JpaPid.fromId(entity.getResourceId()));
outcome.setPersistentId(persistentId);
outcome.setEntity(entity);
return outcome;
@ -681,7 +718,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return myTransactionService.execute(theRequest, transactionDetails, tx -> {
DeleteConflictList deleteConflicts = new DeleteConflictList();
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest, transactionDetails);
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
return outcome;
});
@ -692,20 +729,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
* transaction processors
*/
@Override
public DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequestDetails) {
public DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
validateDeleteEnabled();
TransactionDetails transactionDetails = new TransactionDetails();
return myTransactionService.execute(theRequestDetails, transactionDetails, tx -> doDeleteByUrl(theUrl, deleteConflicts, theRequestDetails));
return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails));
}
@Nonnull
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequest) {
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
SearchParameterMap paramMap = resourceSearch.getSearchParameterMap();
paramMap.setLoadSynchronous(true);
Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest, null);
Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequestDetails, null);
if (resourceIds.size() > 1) {
if (!getStorageSettings().isAllowMultipleDelete()) {
@ -713,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequestDetails, theTransactionDetails);
}
@Override
@ -733,15 +769,23 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
@Nonnull
@Override
public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList(String theUrl, Collection<P> theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequest) {
public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList(String theUrl, Collection<P> theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
StopWatch w = new StopWatch();
TransactionDetails transactionDetails = new TransactionDetails();
List<ResourceTable> deletedResources = new ArrayList<>();
List<IResourcePersistentId<?>> resolvedIds = theResourceIds
.stream()
.map(t -> (IResourcePersistentId<?>) t)
.collect(Collectors.toList());
mySystemDao.preFetchResources(resolvedIds, false);
for (P pid : theResourceIds) {
JpaPid jpaPid = (JpaPid) pid;
// This shouldn't actually need to hit the DB because we pre-fetch above
ResourceTable entity = myEntityManager.find(ResourceTable.class, jpaPid.getId());
deletedResources.add(entity);
@ -750,18 +794,18 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Notify IServerOperationInterceptors about pre-action call
HookParams hooks = new HookParams()
.add(IBaseResource.class, resourceToDelete)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, transactionDetails);
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hooks);
doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hooks);
myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequest, transactionDetails);
myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequestDetails, transactionDetails);
// Perform delete
preDelete(resourceToDelete, entity, theRequest);
preDelete(resourceToDelete, entity, theRequestDetails);
updateEntityForDelete(theRequest, transactionDetails, entity);
updateEntityForDelete(theRequestDetails, transactionDetails, entity);
resourceToDelete.setId(entity.getIdDt());
// Notify JPA interceptors
@ -770,11 +814,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
public void beforeCommit(boolean readOnly) {
HookParams hookParams = new HookParams()
.add(IBaseResource.class, resourceToDelete)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, transactionDetails)
.add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
}
});
}
@ -791,6 +835,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis());
theTransactionDetails.addDeletedResourceIds(theResourceIds);
DeleteMethodOutcome retVal = new DeleteMethodOutcome();
retVal.setDeletedEntities(deletedResources);
retVal.setOperationOutcome(oo);
@ -825,10 +871,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
boolean hasTag = false;
for (BaseTag next : new ArrayList<>(theEntity.getTags())) {
if (Objects.equals(next.getTag().getTagType(), nextDef.getTagType()) &&
Objects.equals(next.getTag().getSystem(), nextDef.getSystem()) &&
Objects.equals(next.getTag().getCode(), nextDef.getCode()) &&
Objects.equals(next.getTag().getVersion(), nextDef.getVersion()) &&
Objects.equals(next.getTag().getUserSelected(), nextDef.getUserSelected())) {
Objects.equals(next.getTag().getSystem(), nextDef.getSystem()) &&
Objects.equals(next.getTag().getCode(), nextDef.getCode()) &&
Objects.equals(next.getTag().getVersion(), nextDef.getVersion()) &&
Objects.equals(next.getTag().getUserSelected(), nextDef.getUserSelected())) {
hasTag = true;
break;
}
@ -1341,7 +1387,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
if (theReindexParameters.getReindexSearchParameters() == ReindexParameters.ReindexSearchParametersEnum.ALL) {
reindexSearchParameters(entity, retVal);
reindexSearchParameters(entity, retVal, theTransactionDetails);
}
if (theReindexParameters.getOptimizeStorage() != ReindexParameters.OptimizeStorageModeEnum.NONE) {
reindexOptimizeStorage(entity, theReindexParameters.getOptimizeStorage());
@ -1351,23 +1397,49 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
@SuppressWarnings("unchecked")
private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome) {
private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome, TransactionDetails theTransactionDetails) {
try {
T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
reindex(resource, entity);
reindexSearchParameters(resource, entity, theTransactionDetails);
} catch (Exception e) {
theReindexOutcome.addWarning("Failed to reindex resource " + entity.getIdDt() + ": " + e);
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
}
}
/**
 * Reindexes the given resource's search parameters inside the caller's existing
 * transaction. Retained only for backwards compatibility; the replacement API
 * reports warnings/outcomes and accepts reindex options.
 *
 * @param theResource the parsed resource to extract index data from; may be null
 *                    (presumably for deleted resources — TODO confirm with callers)
 * @param theEntity   the persisted entity whose index rows are refreshed
 * @deprecated Use {@link #reindex(IResourcePersistentId, ReindexParameters, RequestDetails, TransactionDetails)}
 */
@Deprecated
@Override
public void reindex(T theResource, IBasePersistedResource theEntity) {
	// Reindexing writes index rows, so an active transaction is a hard precondition
	assert TransactionSynchronizationManager.isActualTransactionActive();
	ResourceTable entity = (ResourceTable) theEntity;
	// Pin the transaction "now" to the entity's stored update time so that
	// reindexing does not appear to modify the resource's timestamps
	TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
	reindexSearchParameters(theResource, theEntity, transactionDetails);
}
/**
 * Re-extracts and stores the search parameter index rows for a single resource.
 * <p>
 * While the work runs, the resource is flagged in {@code CURRENTLY_REINDEXING};
 * {@code updateEntity} consults that flag to skip refreshing the entity's
 * last-updated timestamp during a reindex (see the corresponding check in
 * {@code BaseHapiFhirDao#updateEntity}).
 *
 * @param theResource        parsed resource to index; may be null, in which case the
 *                           reindex flag is not toggled
 * @param theEntity          the persisted entity whose index rows are rewritten
 * @param transactionDetails carries the effective transaction timestamp (typically the
 *                           entity's original update time so reindexing is timestamp-neutral)
 */
private void reindexSearchParameters(T theResource, IBasePersistedResource theEntity, TransactionDetails transactionDetails) {
	ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
	if (theResource != null) {
		CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
	}
	try {
		// Return value intentionally ignored; we only need the side effect of
		// rewriting the index rows for this entity
		updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
	} finally {
		// Clear the flag even if updateEntity throws, otherwise the resource would
		// remain marked as reindexing and updateEntity would keep skipping
		// last-updated maintenance for it
		if (theResource != null) {
			CURRENTLY_REINDEXING.put(theResource, null);
		}
	}
}
private void reindexOptimizeStorage(ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
if (historyEntity != null) {
reindexOptimizeStorageHistoryEntity(entity, historyEntity);
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
int pageSize = 100;
for (int page = 0; ((long)page * pageSize) < entity.getVersion(); page++) {
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
Slice<ResourceHistoryTable> historyEntities = myResourceHistoryTableDao.findForResourceIdAndReturnEntities(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
for (ResourceHistoryTable next : historyEntities) {
reindexOptimizeStorageHistoryEntity(entity, next);
@ -1500,23 +1572,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return entity;
}
@Override
public void reindex(T theResource, IBasePersistedResource theEntity) {
	// Reindexing writes index rows, so an active transaction is a hard precondition
	assert TransactionSynchronizationManager.isActualTransactionActive();
	ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
	// Mark the resource as being reindexed; updateEntity checks this flag and
	// skips refreshing the last-updated timestamp while it is set
	if (theResource != null) {
		CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
	}
	ResourceTable entity = (ResourceTable) theEntity;
	// Pin the transaction "now" to the entity's stored update time so reindexing
	// is timestamp-neutral
	TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
	ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
	// NOTE(review): not cleared in a finally block — if updateEntity throws, the
	// resource stays flagged as reindexing
	if (theResource != null) {
		CURRENTLY_REINDEXING.put(theResource, null);
	}
}
@Transactional
@Override
public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm) {

View File

@ -1,3 +1,22 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.i18n.Msg;
@ -7,14 +26,14 @@ import org.hl7.fhir.instance.model.api.IBaseBooleanDatatype;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* We are trying to preserve null behaviour despite IBaseCoding using primitive boolean for userSelected.
*/
public class CodingSpy {
final Map<Class, Field> mySpies = new HashMap<>();
final Map<Class, Field> mySpies = new ConcurrentHashMap<>();
/**
* Reach into the Coding and pull out the Boolean instead of the boolean.

View File

@ -85,6 +85,10 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long>, I
@Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id")
void updateIndexStatus(@Param("id") Long theId, @Param("status") Long theIndexStatus);
@Modifying
@Query("UPDATE ResourceTable t SET t.myUpdated = :updated WHERE t.myId = :id")
void updateLastUpdated(@Param("id") Long theId, @Param("updated") Date theUpdated);
@Modifying
@Query("DELETE FROM ResourceTable t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);

View File

@ -39,9 +39,6 @@ import java.util.stream.Collectors;
public class DeleteExpungeSqlBuilder {
private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSqlBuilder.class);
public static final String PROCESS_NAME = "Delete Expunging";
public static final String THREAD_PREFIX = "delete-expunge";
private final ResourceTableFKProvider myResourceTableFKProvider;
private final JpaStorageSettings myStorageSettings;
private final IIdHelperService myIdHelper;

View File

@ -69,7 +69,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
* This method clears the Hibernate Search index for the given resources.
*/
private void clearHibernateSearchIndex(List<JpaPid> thePersistentIds) {
if (myFullTextSearchSvc != null) {
if (myFullTextSearchSvc != null && !myFullTextSearchSvc.isDisabled()) {
List<Object> objectIds = thePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList());
myFullTextSearchSvc.deleteIndexedDocumentsByTypeAndId(ResourceTable.class, objectIds);
ourLog.info("Cleared Hibernate Search indexes.");

View File

@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
@ -58,8 +59,6 @@ import org.hl7.fhir.utilities.npm.NpmPackage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
@ -90,7 +89,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
@Autowired
private IHapiPackageCacheManager myPackageCacheManager;
@Autowired
private PlatformTransactionManager myTxManager;
private IHapiTransactionService myTxService;
@Autowired
private INpmPackageVersionDao myPackageVersionDao;
@Autowired
@ -128,9 +127,10 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
}
}
public PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theInstallationSpec) {
return myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion());
}
@Override
public PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theInstallationSpec) {
return myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion());
}
/**
* Loads and installs an IG from a file on disk or the Simplifier repo using
@ -152,12 +152,12 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
if (enabled) {
try {
boolean exists = new TransactionTemplate(myTxManager).execute(tx -> {
boolean exists = myTxService.withSystemRequest().withRequestPartitionId(RequestPartitionId.defaultPartition()).execute(() -> {
Optional<NpmPackageVersionEntity> existing = myPackageVersionDao.findByPackageIdAndVersion(theInstallationSpec.getName(), theInstallationSpec.getVersion());
return existing.isPresent();
});
if (exists) {
ourLog.info("Package {}#{} is already installed", theInstallationSpec.getName(), theInstallationSpec.getVersion());
ourLog.info("Package {}#{} is already installed", theInstallationSpec.getName(), theInstallationSpec.getVersion());
}
NpmPackage npmPackage = myPackageCacheManager.installPackage(theInstallationSpec);
@ -267,8 +267,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
}
} catch (IOException e) {
throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format(
"Cannot resolve dependency %s#%s", id, ver), e);
throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format("Cannot resolve dependency %s#%s", id, ver), e);
}
}
}
@ -278,8 +277,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
* Asserts if package FHIR version is compatible with current FHIR version
* by using semantic versioning rules.
*/
protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion)
throws ImplementationGuideInstallationException {
protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion) throws ImplementationGuideInstallationException {
FhirVersionEnum fhirVersionEnum = FhirVersionEnum.forVersionString(fhirVersion);
FhirVersionEnum currentFhirVersionEnum = FhirVersionEnum.forVersionString(currentFhirVersion);
@ -290,9 +288,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
compatible = true;
}
if (!compatible) {
throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format(
"Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)",
currentFhirVersion, fhirVersion));
throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format("Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)", currentFhirVersion, fhirVersion));
}
}
@ -336,26 +332,18 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
ourLog.info("Skipping update of existing resource matching {}", map.toNormalizedQueryString(myFhirContext));
}
}
}
else{
} else {
ourLog.warn("Failed to upload resource of type {} with ID {} - Error: Resource failed validation", theResource.fhirType(), theResource.getIdElement().getValue());
}
}
private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
if (myPartitionSettings.isPartitioningEnabled()) {
SystemRequestDetails requestDetails = newSystemRequestDetails();
return theDao.search(theMap, requestDetails);
} else {
return theDao.search(theMap);
}
return theDao.search(theMap, newSystemRequestDetails());
}
@Nonnull
private SystemRequestDetails newSystemRequestDetails() {
return
new SystemRequestDetails()
.setRequestPartitionId(RequestPartitionId.defaultPartition());
return new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition());
}
private void createResource(IFhirResourceDao theDao, IBaseResource theResource) {
@ -400,8 +388,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
}
if (!isValidResourceStatusForPackageUpload(theResource)) {
ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.",
theResource.fhirType(), theResource.getIdElement().getValue());
ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.", theResource.fhirType(), theResource.getIdElement().getValue());
return false;
}
@ -458,8 +445,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
try {
return validationSupport.generateSnapshot(new ValidationSupportContext(validationSupport), sd, null, null, null);
} catch (Exception e) {
throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format(
"Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format("Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
}
}

View File

@ -26,10 +26,13 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.*;
import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
@ -40,6 +43,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseParameters;
@ -59,6 +63,7 @@ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -132,6 +137,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
@SuppressWarnings({"unchecked", "rawtypes"})
@Nonnull
private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdType theResourceId) {
StopWatch sw = new StopWatch();
IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@ -144,16 +150,26 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
ResourceIndexedSearchParams existingParamsToPopulate = new ResourceIndexedSearchParams(entity);
existingParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());
dao.reindex(resource, entity);
List<String> messages = new ArrayList<>();
JpaPid pid = JpaPid.fromId(entity.getId());
ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails());
messages.add("Reindex completed in " + sw);
for (String next : outcome.getWarnings()) {
messages.add("WARNING: " + next);
}
ResourceIndexedSearchParams newParamsToPopulate = new ResourceIndexedSearchParams(entity);
newParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());
return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true);
return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true, messages);
}
@Nonnull
private Parameters reindexDryRunInTransaction(RequestDetails theRequestDetails, IIdType theResourceId, RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails, Set<String> theParameters) {
StopWatch sw = new StopWatch();
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@ -186,7 +202,8 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
showAction = false;
}
return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction);
String message = "Reindex dry-run completed in " + sw + ". No changes were committed to any stored data.";
return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction, List.of(message));
}
@Nonnull
@ -197,12 +214,16 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
@Nonnull
@VisibleForTesting
Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction) {
Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction, List<String> theMessages) {
Parameters parameters = new Parameters();
Parameters.ParametersParameterComponent narrativeParameter = parameters.addParameter();
narrativeParameter.setName("Narrative");
for (String next : theMessages) {
parameters.addParameter("Message", new StringType(next));
}
// Normal indexes
addParamsNonMissing(parameters, "CoordinateIndexes", "Coords", theExistingParams.myCoordsParams, theNewParams.myCoordsParams, new CoordsParamPopulator(), theShowAction);
addParamsNonMissing(parameters, "DateIndexes", "Date", theExistingParams.myDateParams, theNewParams.myDateParams, new DateParamPopulator(), theShowAction);

View File

@ -1,5 +1,15 @@
<div xmlns:th="http://www.thymeleaf.org">
<!--/* Messages */-->
<div th:if="${resource.hasParameter('Message')}" id="Messages">
<h1>Outcome</h1>
<ul>
<li th:each="part : ${resource.getParameters('Message')}">
[[${part.getValue().getValue()}]]
</li>
</ul>
</div>
<!--/* Number Indexes */-->
<div th:if="${resource.hasParameter('NumberIndexes')}" id="NumberIndexes">
<h1>Number Indexes</h1>

View File

@ -1,12 +1,263 @@
package ca.uhn.fhir.jpa.packages;
import org.elasticsearch.common.inject.Inject;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Communication;
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.utilities.npm.NpmPackage;
import org.hl7.fhir.utilities.npm.PackageGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.annotation.Nonnull;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link PackageInstallerSvcImpl}: FHIR version compatibility
 * assertions, the {@code validForUpload} rules for SearchParameters,
 * Subscriptions, DocumentReferences and Communications, and the
 * duplicate-CodeSystem handling applied while installing a package.
 */
@ExtendWith(MockitoExtension.class)
public class PackageInstallerSvcImplTest {
	public static final String PACKAGE_VERSION = "1.0";
	public static final String PACKAGE_ID_1 = "package1";
	@Mock
	private INpmPackageVersionDao myPackageVersionDao;
	@Mock
	private IHapiPackageCacheManager myPackageCacheManager;
	@Mock
	private ISearchParamRegistryController mySearchParamRegistryController;
	@Mock
	private DaoRegistry myDaoRegistry;
	@Mock
	private IFhirResourceDao<CodeSystem> myCodeSystemDao;
	@Captor
	private ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
	@Captor
	private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;
	@Spy
	private FhirContext myCtx = FhirContext.forR4Cached();
	@Spy
	private IHapiTransactionService myTxService = new NonTransactionalHapiTransactionService();
	@Spy
	private PackageResourceParsingSvc myPackageResourceParsingSvc = new PackageResourceParsingSvc(myCtx);
	@Spy
	private PartitionSettings myPartitionSettings = new PartitionSettings();
	@InjectMocks
	private PackageInstallerSvcImpl mySvc;

	@Test
	public void testPackageCompatibility() {
		// An R4 package is expected to be installable on an R4B server
		mySvc.assertFhirVersionsAreCompatible("R4", "R4B");
	}

	@Test
	public void testValidForUpload_SearchParameterWithMetaParam() {
		// Built-in meta parameters such as _id must not be uploaded from packages
		SearchParameter sp = new SearchParameter();
		sp.setCode("_id");
		assertFalse(mySvc.validForUpload(sp));
	}

	@Test
	public void testValidForUpload_SearchParameterWithNoBase() {
		SearchParameter sp = new SearchParameter();
		sp.setCode("name");
		sp.setExpression("Patient.name");
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
		assertFalse(mySvc.validForUpload(sp));
	}

	@Test
	public void testValidForUpload_SearchParameterWithNoExpression() {
		SearchParameter sp = new SearchParameter();
		sp.setCode("name");
		sp.addBase("Patient");
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
		assertFalse(mySvc.validForUpload(sp));
	}

	@Test
	public void testValidForUpload_GoodSearchParameter() {
		// Code + base + expression + status present: eligible for upload
		SearchParameter sp = new SearchParameter();
		sp.setCode("name");
		sp.addBase("Patient");
		sp.setExpression("Patient.name");
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
		assertTrue(mySvc.validForUpload(sp));
	}

	@Test
	public void testValidForUpload_RequestedSubscription() {
		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
			new Subscription.SubscriptionChannelComponent()
				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
				.setEndpoint("https://tinyurl.com/2p95e27r");
		Subscription subscription = new Subscription();
		subscription.setCriteria("Patient?name=smith");
		subscription.setChannel(subscriptionChannelComponent);
		subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED);
		assertTrue(mySvc.validForUpload(subscription));
	}

	@Test
	public void testValidForUpload_ErrorSubscription() {
		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
			new Subscription.SubscriptionChannelComponent()
				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
				.setEndpoint("https://tinyurl.com/2p95e27r");
		Subscription subscription = new Subscription();
		subscription.setCriteria("Patient?name=smith");
		subscription.setChannel(subscriptionChannelComponent);
		subscription.setStatus(Subscription.SubscriptionStatus.ERROR);
		assertFalse(mySvc.validForUpload(subscription));
	}

	@Test
	public void testValidForUpload_ActiveSubscription() {
		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
			new Subscription.SubscriptionChannelComponent()
				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
				.setEndpoint("https://tinyurl.com/2p95e27r");
		Subscription subscription = new Subscription();
		subscription.setCriteria("Patient?name=smith");
		subscription.setChannel(subscriptionChannelComponent);
		subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
		assertFalse(mySvc.validForUpload(subscription));
	}

	@Test
	public void testValidForUpload_DocumentRefStatusValuePresent() {
		DocumentReference documentReference = new DocumentReference();
		documentReference.setStatus(Enumerations.DocumentReferenceStatus.ENTEREDINERROR);
		assertTrue(mySvc.validForUpload(documentReference));
	}

	@Test
	public void testValidForUpload_DocumentRefStatusValueNull() {
		// Both the NULL enum constant and an actual null status are rejected
		DocumentReference documentReference = new DocumentReference();
		documentReference.setStatus(Enumerations.DocumentReferenceStatus.NULL);
		assertFalse(mySvc.validForUpload(documentReference));
		documentReference.setStatus(null);
		assertFalse(mySvc.validForUpload(documentReference));
	}

	@Test
	public void testValidForUpload_CommunicationStatusValuePresent() {
		Communication communication = new Communication();
		communication.setStatus(Communication.CommunicationStatus.NOTDONE);
		assertTrue(mySvc.validForUpload(communication));
	}

	@Test
	public void testValidForUpload_CommunicationStatusValueNull() {
		// Both the NULL enum constant and an actual null status are rejected
		Communication communication = new Communication();
		communication.setStatus(Communication.CommunicationStatus.NULL);
		assertFalse(mySvc.validForUpload(communication));
		communication.setStatus(null);
		assertFalse(mySvc.validForUpload(communication));
	}

	@Test
	public void testDontTryToInstallDuplicateCodeSystem_CodeSystemAlreadyExistsWithDifferentId() throws IOException {
		// Setup

		// The CodeSystem that is already saved in the repository
		CodeSystem existingCs = new CodeSystem();
		existingCs.setId("CodeSystem/existingcs");
		existingCs.setUrl("http://my-code-system");
		existingCs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);

		// A new code system in a package we're installing that has the
		// same URL as the previously saved one, but a different ID.
		CodeSystem cs = new CodeSystem();
		cs.setId("CodeSystem/mycs");
		cs.setUrl("http://my-code-system");
		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
		NpmPackage pkg = createPackage(cs, PACKAGE_ID_1);

		when(myPackageVersionDao.findByPackageIdAndVersion(any(), any())).thenReturn(Optional.empty());
		when(myPackageCacheManager.installPackage(any())).thenReturn(pkg);
		when(myDaoRegistry.getResourceDao(CodeSystem.class)).thenReturn(myCodeSystemDao);
		when(myCodeSystemDao.search(any(), any())).thenReturn(new SimpleBundleProvider(existingCs));
		when(myCodeSystemDao.update(any(), any(RequestDetails.class))).thenReturn(new DaoMethodOutcome());

		PackageInstallationSpec spec = new PackageInstallationSpec();
		spec.setName(PACKAGE_ID_1);
		spec.setVersion(PACKAGE_VERSION);
		spec.setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
		spec.setPackageContents(packageToBytes(pkg));

		// Test
		mySvc.install(spec);

		// Verify: the installer searched by canonical URL and updated the
		// EXISTING resource id rather than creating a duplicate.
		verify(myCodeSystemDao, times(1)).search(mySearchParameterMapCaptor.capture(), any());
		SearchParameterMap map = mySearchParameterMapCaptor.getValue();
		assertEquals("?url=http%3A%2F%2Fmy-code-system", map.toNormalizedQueryString(myCtx));
		verify(myCodeSystemDao, times(1)).update(myCodeSystemCaptor.capture(), any(RequestDetails.class));
		CodeSystem codeSystem = myCodeSystemCaptor.getValue();
		assertEquals("existingcs", codeSystem.getIdPart());
	}

	/**
	 * Serializes an NPM package into its byte representation.
	 */
	@Nonnull
	private static byte[] packageToBytes(NpmPackage pkg) throws IOException {
		ByteArrayOutputStream stream = new ByteArrayOutputStream();
		pkg.save(stream);
		byte[] bytes = stream.toByteArray();
		return bytes;
	}

	/**
	 * Builds a minimal R4 NPM package containing the given CodeSystem as its
	 * only resource file.
	 */
	@Nonnull
	private NpmPackage createPackage(CodeSystem cs, String packageId) throws IOException {
		PackageGenerator manifestGenerator = new PackageGenerator();
		manifestGenerator.name(packageId);
		manifestGenerator.version(PACKAGE_VERSION);
		manifestGenerator.description("a package");
		manifestGenerator.fhirVersions(List.of(FhirVersionEnum.R4.getFhirVersionString()));
		NpmPackage pkg = NpmPackage.empty(manifestGenerator);
		String csString = myCtx.newJsonParser().encodeResourceToString(cs);
		pkg.addFile("package", "cs.json", csString.getBytes(StandardCharsets.UTF_8), "CodeSystem");
		return pkg;
	}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -127,7 +127,7 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
ResourceTable entity = new ResourceTable();
entity.setId(55L);
entity.setResourceType("Observation");
entity.setVersion(0L);
entity.setVersionForUnitTest(0L);
testObservationPersist.deleteObservationIndex(entity);
elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,6 +20,7 @@ import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletRequest;
import java.util.function.Supplier;
import static org.awaitility.Awaitility.await;
import static org.mockito.Mockito.when;
@ -108,4 +109,19 @@ public abstract class BaseMdmHelper implements BeforeEachCallback, AfterEachCall
public PointcutLatch getAfterMdmLatch() {
return myAfterMdmLatch;
}
/**
 * Runs {@code theSupplier} while expecting exactly one invocation of the
 * MDM_AFTER_PERSISTED_RESOURCE_CHECKED pointcut, blocking until that pointcut
 * has fired before returning.
 *
 * @param theSupplier the operation expected to trigger exactly one MDM message
 * @param <T>         the type produced by the supplier
 * @return the value returned by {@code theSupplier}
 * @throws InterruptedException if interrupted while waiting on the latch
 */
public <T> T executeWithLatch(Supplier<T> theSupplier) throws InterruptedException {
	myAfterMdmLatch.setExpectedCount(1);
	final T result = theSupplier.get();
	myAfterMdmLatch.awaitExpected();
	return result;
}
}

View File

@ -1,15 +1,20 @@
package ca.uhn.fhir.jpa.mdm.helper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.mdm.api.MdmLinkEvent;
import ca.uhn.fhir.rest.server.TransactionLogMessages;
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;
import ca.uhn.test.concurrency.PointcutLatch;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Patient;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import java.util.List;
import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD;
import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS;
@ -27,8 +32,8 @@ public class MdmHelperR4 extends BaseMdmHelper {
public OutcomeAndLogMessageWrapper createWithLatch(IBaseResource theBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
myAfterMdmLatch.setExpectedCount(1);
DaoMethodOutcome daoMethodOutcome = doCreateResource(theBaseResource, isExternalHttpRequest);
myAfterMdmLatch.awaitExpected();
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, myAfterMdmLatch.getLatchInvocationParameterOfType(TransactionLogMessages.class));
List<HookParams> hookParams = myAfterMdmLatch.awaitExpected();
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, hookParams);
}
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource) throws InterruptedException {
@ -38,8 +43,8 @@ public class MdmHelperR4 extends BaseMdmHelper {
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
myAfterMdmLatch.setExpectedCount(1);
DaoMethodOutcome daoMethodOutcome = doUpdateResource(theIBaseResource, isExternalHttpRequest);
myAfterMdmLatch.awaitExpected();
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, myAfterMdmLatch.getLatchInvocationParameterOfType(TransactionLogMessages.class));
List<HookParams> hookParams = myAfterMdmLatch.awaitExpected();
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, hookParams);
}
public DaoMethodOutcome doCreateResource(IBaseResource theResource, boolean isExternalHttpRequest) {
@ -68,12 +73,12 @@ public class MdmHelperR4 extends BaseMdmHelper {
* by the MDM module.
*/
public class OutcomeAndLogMessageWrapper {
DaoMethodOutcome myDaoMethodOutcome;
TransactionLogMessages myLogMessages;
private final DaoMethodOutcome myDaoMethodOutcome;
private final List<HookParams> myHookParams;
/**
 * Wraps the DAO outcome of the triggering operation together with the hook
 * invocation parameters captured by the MDM latch.
 *
 * @param theDaoMethodOutcome the outcome of the create/update that triggered MDM
 * @param theHookParams       the parameters captured from the MDM pointcut invocation
 */
public OutcomeAndLogMessageWrapper(DaoMethodOutcome theDaoMethodOutcome, List<HookParams> theHookParams) {
	myDaoMethodOutcome = theDaoMethodOutcome;
	myHookParams = theHookParams;
}
public DaoMethodOutcome getDaoMethodOutcome() {
@ -81,7 +86,19 @@ public class MdmHelperR4 extends BaseMdmHelper {
}
/**
 * @return the {@link TransactionLogMessages} instance extracted from the
 * captured hook invocation parameters
 */
public TransactionLogMessages getLogMessages() {
	return PointcutLatch.getInvocationParameterOfType(myHookParams, TransactionLogMessages.class);
}
/** @return the raw hook invocation parameters captured by the MDM latch */
public List<HookParams> getHookParams() {
return myHookParams;
}
/** @return the {@link MdmLinkEvent} extracted from the captured hook parameters */
public MdmLinkEvent getMdmLinkEvent() {
return PointcutLatch.getInvocationParameterOfType(myHookParams, MdmLinkEvent.class);
}
/** @return the {@link ResourceOperationMessage} extracted from the captured hook parameters */
public ResourceOperationMessage getResourceOperationMessage() {
return PointcutLatch.getInvocationParameterOfType(myHookParams, ResourceOperationMessage.class);
}
}

View File

@ -58,9 +58,9 @@ public class MdmEventIT extends BaseMdmR4Test {
addExternalEID(patient2, "eid-11");
addExternalEID(patient2, "eid-22");
myMdmHelper.updateWithLatch(patient2);
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.updateWithLatch(patient2);
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
assertNotNull(linkChangeEvent);
ourLog.info("Got event: {}", linkChangeEvent);
@ -84,15 +84,15 @@ public class MdmEventIT extends BaseMdmR4Test {
@Test
public void testCreateLinkChangeEvent() throws InterruptedException {
Practitioner pr = buildPractitionerWithNameAndId("Young", "AC-DC");
myMdmHelper.createWithLatch(pr);
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.createWithLatch(pr);
ResourceOperationMessage resourceOperationMessage = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(ResourceOperationMessage.class);
ResourceOperationMessage resourceOperationMessage = outcome.getResourceOperationMessage();
assertNotNull(resourceOperationMessage);
assertEquals(pr.getIdElement().toUnqualifiedVersionless().getValue(), resourceOperationMessage.getId());
MdmLink link = getLinkByTargetId(pr);
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
assertNotNull(linkChangeEvent);
assertEquals(1, linkChangeEvent.getMdmLinks().size());
@ -110,9 +110,9 @@ public class MdmEventIT extends BaseMdmR4Test {
@Test
public void testUpdateLinkChangeEvent() throws InterruptedException {
Patient patient1 = addExternalEID(buildJanePatient(), "eid-1");
myMdmHelper.createWithLatch(patient1);
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.createWithLatch(patient1);
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
assertNotNull(linkChangeEvent);
assertEquals(1, linkChangeEvent.getMdmLinks().size());

View File

@ -257,12 +257,11 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test {
}
@Test
public void testReferenceExpansionQuietlyFailsOnMissingMdmMatches() {
public void testReferenceExpansionQuietlyFailsOnMissingMdmMatches() throws InterruptedException {
myStorageSettings.setAllowMdmExpansion(true);
Patient patient = buildJanePatient();
patient.getMeta().addTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED, "Don't MDM on me!");
DaoMethodOutcome daoMethodOutcome = myMdmHelper.doCreateResource(patient, true);
String id = daoMethodOutcome.getId().getIdPart();
String id = myMdmHelper.executeWithLatch(() -> myMdmHelper.doCreateResource(patient, true)).getId().getIdPart();
createObservationWithSubject(id);
//Even though the user has NO mdm links, that should not cause a request failure.

View File

@ -32,7 +32,7 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
@Autowired
private IMdmSubmitSvc myMdmSubmitSvc;
@Autowired
private MdmSettings myMdmSettings;
protected MdmSettings myMdmSettings;
@Autowired
private MdmControllerHelper myMdmHelper;
@Autowired

View File

@ -1,7 +1,7 @@
package ca.uhn.fhir.jpa.mdm.provider;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.mdm.api.MdmConstants;
@ -73,6 +73,7 @@ public class MdmProviderCreateLinkR4Test extends BaseLinkR4Test {
@Test
public void testCreateLinkWithMatchResultOnDifferentPartitions() {
myPartitionSettings.setPartitioningEnabled(true);
myMdmSettings.setSearchAllPartitionForMatch(false);
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);
assertLinkCount(1);

View File

@ -148,6 +148,7 @@ public class MdmProviderMergeGoldenResourcesR4Test extends BaseProviderR4Test {
@Test
public void testMergeOnDifferentPartitions() {
myPartitionSettings.setPartitioningEnabled(true);
myMdmSettings.setSearchAllPartitionForMatch(false);
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1);
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -153,10 +153,6 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
myUpdated = theUpdated;
}
public void setUpdated(InstantDt theUpdated) {
myUpdated = theUpdated.getValue();
}
@Override
public abstract long getVersion();

View File

@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.model.search.ResourceTableRoutingBinder;
import ca.uhn.fhir.jpa.model.search.SearchParamTextPropertyBinder;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Session;
@ -59,6 +60,7 @@ import javax.persistence.Index;
import javax.persistence.NamedEntityGraph;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.PostPersist;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
@ -67,12 +69,13 @@ import javax.persistence.Version;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
@Indexed(routingBinder = @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
@Entity
@Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = {
// Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE
@ -83,23 +86,22 @@ import java.util.stream.Collectors;
@NamedEntityGraph(name = "Resource.noJoins")
public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource<JpaPid> {
public static final int RESTYPE_LEN = 40;
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final long serialVersionUID = 1L;
public static final String HFJ_RESOURCE = "HFJ_RESOURCE";
public static final String RES_TYPE = "RES_TYPE";
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final long serialVersionUID = 1L;
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
* Note the extra config needed in HS6 for indexing transient props:
* https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#indexed-transient-requires-configuration
*
* <p>
* Note that we depend on `myVersion` updated for this field to be indexed.
*/
@Transient
@FullTextField(name = "myContentText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
@FullTextField(name = "myContentTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
@FullTextField(name = "myContentTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
@FullTextField(name = "myContentTextPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
@FullTextField(name = "myContentTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
@FullTextField(name = "myContentTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
@FullTextField(name = "myContentTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
@OptimisticLock(excluded = true)
@IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion")))
private String myContentText;
@ -133,9 +135,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
*/
@Transient()
@FullTextField(name = "myNarrativeText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
@FullTextField(name = "myNarrativeTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
@FullTextField(name = "myNarrativeTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
@FullTextField(name = "myNarrativeTextPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
@FullTextField(name = "myNarrativeTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
@FullTextField(name = "myNarrativeTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
@FullTextField(name = "myNarrativeTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
@OptimisticLock(excluded = true)
@IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion")))
private String myNarrativeText;
@ -176,7 +178,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@Column(name = "SP_QUANTITY_PRESENT")
@OptimisticLock(excluded = true)
private boolean myParamsQuantityPopulated;
/**
* Added to support UCUM conversion
* since 5.3.0
@ -184,9 +186,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false)
@OptimisticLock(excluded = true)
private Collection<ResourceIndexedSearchParamQuantityNormalized> myParamsQuantityNormalized;
/**
* Added to support UCUM conversion,
* Added to support UCUM conversion,
* NOTE : use Boolean class instead of boolean primitive, in order to set the existing rows to null
* since 5.3.0
*/
@ -278,18 +280,17 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@Transient
private transient boolean myUnchangedInCurrentOperation;
/**
* The id of the Resource.
* Will contain either the client-assigned id, or the sequence value.
* Will be null during insert time until the first read.
*
*/
@Column(name= "FHIR_ID",
@Column(name = "FHIR_ID",
// [A-Za-z0-9\-\.]{1,64} - https://www.hl7.org/fhir/datatypes.html#id
length = 64,
// we never update this after insert, and the Generator will otherwise "dirty" the object.
updatable = false)
// inject the pk for server-assigned sequence ids.
@GeneratorType(when = GenerationTime.INSERT, type = FhirIdGenerator.class)
// Make sure the generator doesn't bump the history version.
@ -305,30 +306,21 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@Column(name = "SEARCH_URL_PRESENT", nullable = true)
private Boolean mySearchUrlPresent = false;
/**
 * Hibernate insert-time generator that fills {@code FHIR_ID} so the column is
 * uniform at query time: a client-supplied id wins; otherwise the
 * server-assigned sequence pk is used. Hibernate invokes insert generators
 * after the pk has been assigned, so reading {@code myId} here is safe.
 */
public static final class FhirIdGenerator implements ValueGenerator<String> {
	@Override
	public String generateValue(Session session, Object owner) {
		ResourceTable entity = (ResourceTable) owner;
		if (entity.myFhirId != null) {
			return entity.myFhirId;
		}
		return entity.myId.toString();
	}
}
@Version
@Column(name = "RES_VER")
private long myVersion;
@OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY)
private Collection<ResourceHistoryProvenanceEntity> myProvenance;
@Transient
private transient ResourceHistoryTable myCurrentVersionEntity;
@Transient
private transient ResourceHistoryTable myNewVersionEntity;
@Transient
private transient boolean myVersionUpdatedInCurrentTransaction;
@OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false, mappedBy = "myResource")
@OptimisticLock(excluded = true)
private ForcedId myForcedId;
@ -343,6 +335,39 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
super();
}
/**
 * Signals that changes are being made and the version number will be bumped
 * within the active transaction. After this call, {@link #getVersion()}
 * reflects the incremented number. The flag is cleared in
 * {@link #postPersist()}, at which point the stored row carries the new
 * version.
 */
public void markVersionUpdatedInCurrentTransaction() {
	if (myVersionUpdatedInCurrentTransaction) {
		return;
	}
	/*
	 * The column behind myVersion is a Hibernate @Version field, so whatever
	 * value we assign here is ignored on write - Hibernate bumps it itself
	 * whenever the row is updated. We still increment locally for two reasons:
	 * 1. The value feeds the version attribute of the ResourceHistoryTable
	 *    entity created for each new version.
	 * 2. For updates that touch nothing else on this entity, the changed field
	 *    marks the entity dirty and forces Hibernate to issue the update.
	 */
	myVersion++;
	myVersionUpdatedInCurrentTransaction = true;
}
@PostPersist
public void postPersist() {
myVersionUpdatedInCurrentTransaction = false;
}
@Override
public ResourceTag addTag(TagDefinition theTag) {
for (ResourceTag next : getTags()) {
@ -355,7 +380,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
return tag;
}
public String getHashSha256() {
return myHashSha256;
}
@ -558,6 +582,26 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
return myVersion;
}
/**
* Sets the version on this entity to {@literal 1}. This should only be called
* on resources that are not yet persisted. After that time the version number
* is managed by hibernate.
*/
public void initializeVersion() {
assert myId == null;
myVersion = 1;
}
/**
* Don't call this in any JPA environments, the version will be ignored
* since this field is managed by hibernate
*/
@VisibleForTesting
public void setVersionForUnitTest(long theVersion) {
myVersion = theVersion;
}
@Override
public boolean isDeleted() {
return getDeleted() != null;
@ -568,10 +612,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
setDeleted(null);
}
public void setVersion(long theVersion) {
myVersion = theVersion;
}
public boolean isHasLinks() {
return myHasLinks;
}
@ -580,6 +620,23 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
myHasLinks = theHasLinks;
}
/**
* Clears all the index population flags, e.g. {@link #isParamsStringPopulated()}
*
* @since 6.8.0
*/
public void clearAllParamsPopulated() {
myParamsTokenPopulated = false;
myParamsCoordsPopulated = false;
myParamsDatePopulated = false;
myParamsNumberPopulated = false;
myParamsStringPopulated = false;
myParamsQuantityPopulated = false;
myParamsQuantityNormalizedPopulated = false;
myParamsUriPopulated = false;
myHasLinks = false;
}
public boolean isParamsComboStringUniquePresent() {
if (myParamsComboStringUniquePresent == null) {
return false;
@ -633,7 +690,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
public void setParamsQuantityPopulated(boolean theParamsQuantityPopulated) {
myParamsQuantityPopulated = theParamsQuantityPopulated;
}
public Boolean isParamsQuantityNormalizedPopulated() {
if (myParamsQuantityNormalizedPopulated == null)
return Boolean.FALSE;
@ -689,14 +746,14 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
myUnchangedInCurrentOperation = theUnchangedInCurrentOperation;
}
public void setContentText(String theContentText) {
myContentText = theContentText;
}
public String getContentText() {
return myContentText;
}
public void setContentText(String theContentText) {
myContentText = theContentText;
}
public void setNarrativeText(String theNarrativeText) {
myNarrativeText = theNarrativeText;
}
@ -709,12 +766,27 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
mySearchUrlPresent = theSearchUrlPresent;
}
/**
* This method creates a new history entity, or might reuse the current one if we've
* already created one in the current transaction. This is because we can only increment
* the version once in a DB transaction (since hibernate manages that number) so creating
* multiple {@link ResourceHistoryTable} entities will result in a constraint error.
*/
public ResourceHistoryTable toHistory(boolean theCreateVersionTags) {
ResourceHistoryTable retVal = new ResourceHistoryTable();
boolean createVersionTags = theCreateVersionTags;
ResourceHistoryTable retVal = myNewVersionEntity;
if (retVal == null) {
retVal = new ResourceHistoryTable();
myNewVersionEntity = retVal;
} else {
// Tags should already be set
createVersionTags = false;
}
retVal.setResourceId(myId);
retVal.setResourceType(myResourceType);
retVal.setVersion(myVersion);
retVal.setVersion(getVersion());
retVal.setTransientForcedId(getTransientForcedId());
retVal.setPublished(getPublishedDate());
@ -725,10 +797,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
retVal.setForcedId(getForcedId());
retVal.setPartitionId(getPartitionId());
retVal.getTags().clear();
retVal.setHasTags(isHasTags());
if (isHasTags() && theCreateVersionTags) {
if (isHasTags() && createVersionTags) {
for (ResourceTag next : getTags()) {
retVal.addTag(next);
}
@ -772,16 +842,16 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
* This is a convenience to avoid loading the version a second time within a single transaction. It is
* not persisted.
*/
public void setCurrentVersionEntity(ResourceHistoryTable theCurrentVersionEntity) {
myCurrentVersionEntity = theCurrentVersionEntity;
public ResourceHistoryTable getCurrentVersionEntity() {
return myCurrentVersionEntity;
}
/**
* This is a convenience to avoid loading the version a second time within a single transaction. It is
* not persisted.
*/
public ResourceHistoryTable getCurrentVersionEntity() {
return myCurrentVersionEntity;
public void setCurrentVersionEntity(ResourceHistoryTable theCurrentVersionEntity) {
myCurrentVersionEntity = theCurrentVersionEntity;
}
@Override
@ -799,8 +869,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
myForcedId = theForcedId;
}
@Override
public IdDt getIdDt() {
IdDt retVal = new IdDt();
@ -808,7 +876,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
return retVal;
}
public IIdType getIdType(FhirContext theContext) {
IIdType retVal = theContext.getVersion().newIdType();
populateId(retVal);
@ -830,14 +897,14 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}
}
public void setCreatedByMatchUrl(String theCreatedByMatchUrl) {
myCreatedByMatchUrl = theCreatedByMatchUrl;
}
public String getCreatedByMatchUrl() {
return myCreatedByMatchUrl;
}
public void setCreatedByMatchUrl(String theCreatedByMatchUrl) {
myCreatedByMatchUrl = theCreatedByMatchUrl;
}
public void setLuceneIndexData(ExtendedHSearchIndexData theLuceneIndexData) {
myLuceneIndexData = theLuceneIndexData;
}
@ -862,4 +929,18 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
public void setFhirId(String theFhirId) {
myFhirId = theFhirId;
}
/**
* Populate myFhirId with server-assigned sequence id when no client-id provided.
* We eat this complexity during insert to simplify query time with a uniform column.
* Server-assigned sequence ids aren't available until just before insertion.
* Hibernate calls insert Generators after the pk has been assigned, so we can use myId safely here.
*/
public static final class FhirIdGenerator implements ValueGenerator<String> {
@Override
public String generateValue(Session session, Object owner) {
ResourceTable that = (ResourceTable) owner;
return that.myFhirId != null ? that.myFhirId : that.myId.toString();
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -224,6 +224,9 @@ public class SearchParamExtractorService {
@Override
public IBaseResource fetchResourceAtPath(@Nonnull PathAndRef thePathAndRef) {
if (thePathAndRef.getRef() == null) {
return null;
}
return findContainedResource(containedResources, thePathAndRef.getRef());
}
};

View File

@ -94,7 +94,7 @@ public class SearchParamRegistryImplTest {
ResourceTable searchParamEntity = new ResourceTable();
searchParamEntity.setResourceType("SearchParameter");
searchParamEntity.setId(theId);
searchParamEntity.setVersion(theVersion);
searchParamEntity.setVersionForUnitTest(theVersion);
return searchParamEntity;
}
@ -199,7 +199,7 @@ public class SearchParamRegistryImplTest {
// Update the resource without changing anything that would affect our cache
ResourceTable lastEntity = newEntities.get(newEntities.size() - 1);
lastEntity.setVersion(2);
lastEntity.setVersionForUnitTest(2);
resetMock(Enumerations.PublicationStatus.ACTIVE, newEntities);
mySearchParamRegistry.requestRefresh();
assertResult(mySearchParamRegistry.refreshCacheIfNecessary(), 0, 1, 0);

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.subscription.channel.subscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -60,6 +61,7 @@ class SubscriptionChannelCache {
return myCache.containsKey(theChannelName);
}
@VisibleForTesting
void logForUnitTest() {
for (String key : myCache.keySet()) {
ourLog.info("SubscriptionChannelCache: {}", key);

View File

@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.subscription.channel.models.ReceivingChannelParameters;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.ChannelRetryConfiguration;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.slf4j.Logger;
@ -147,4 +148,9 @@ public class SubscriptionChannelRegistry {
public synchronized int size() {
return myDeliveryReceiverChannels.size();
}
@VisibleForTesting
public void logForUnitTest() {
myDeliveryReceiverChannels.logForUnitTest();
}
}

View File

@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
@ -93,17 +92,15 @@ public class SubscriptionRegisteringSubscriber implements MessageHandler {
// - in order to store partition id in the userdata of the resource for partitioned subscriptions
// - in case we're processing out of order and a create-then-delete has been processed backwards (or vice versa)
IBaseResource payloadResource;
IIdType payloadId = payload.getPayloadId(myFhirContext).toUnqualifiedVersionless();
try {
IFhirResourceDao<?> subscriptionDao = myDaoRegistry.getResourceDao("Subscription");
RequestDetails systemRequestDetails = getPartitionAwareRequestDetails(payload);
payloadResource = subscriptionDao.read(payloadId, systemRequestDetails);
if (payloadResource == null) {
// Only for unit test
payloadResource = payload.getPayload(myFhirContext);
}
} catch (ResourceGoneException e) {
IFhirResourceDao<?> subscriptionDao = myDaoRegistry.getResourceDao("Subscription");
RequestDetails systemRequestDetails = getPartitionAwareRequestDetails(payload);
IBaseResource payloadResource = subscriptionDao.read(payloadId, systemRequestDetails, true);
if (payloadResource == null) {
// Only for unit test
payloadResource = payload.getPayload(myFhirContext);
}
if (payloadResource.isDeleted()) {
mySubscriptionRegistry.unregisterSubscriptionIfRegistered(payloadId.getIdPart());
return;
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.SubscriptionTopic;
@ -31,11 +32,10 @@ public final class SubscriptionTopicCanonicalizer {
}
// WIP STR5 use elsewhere
public static SubscriptionTopic canonicalize(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) {
public static SubscriptionTopic canonicalizeTopic(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) {
switch (theFhirContext.getVersion().getVersion()) {
case R4B:
String encoded = theFhirContext.newJsonParser().encodeResourceToString(theSubscriptionTopic);
return ourFhirContextR5.newJsonParser().parseResource(SubscriptionTopic.class, encoded);
return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionTopic) theSubscriptionTopic);
case R5:
return (SubscriptionTopic) theSubscriptionTopic;
default:

View File

@ -26,11 +26,11 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionConstants;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
@ -40,7 +40,7 @@ import java.util.Set;
public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicLoader.class);
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
@Autowired
private FhirContext myFhirContext;
@ -107,10 +107,7 @@ public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer {
if (theResource instanceof SubscriptionTopic) {
return (SubscriptionTopic) theResource;
} else if (theResource instanceof org.hl7.fhir.r4b.model.SubscriptionTopic) {
return myFhirContext.newJsonParser().parseResource(SubscriptionTopic.class, FhirContext.forR4BCached().newJsonParser().encodeResourceToString(theResource));
// WIP STR5 VersionConvertorFactory_43_50 when it supports SubscriptionTopic
// track here: https://github.com/hapifhir/org.hl7.fhir.core/issues/1212
// return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionTopic) theResource);
return SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theResource);
} else {
throw new IllegalArgumentException(Msg.code(2332) + "Only R4B and R5 SubscriptionTopic is currently supported. Found " + theResource.getClass());
}

View File

@ -29,11 +29,11 @@ import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;
@ -45,7 +45,7 @@ import java.util.List;
import java.util.UUID;
public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicMatchingSubscriber.class);
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
private final FhirContext myFhirContext;
@Autowired

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.Bundle;
@ -55,11 +56,7 @@ public class SubscriptionTopicPayloadBuilder {
if (fhirVersion == FhirVersionEnum.R4B) {
bundleBuilder.setType(Bundle.BundleType.HISTORY.toCode());
String serializedSubscriptionStatus = FhirContext.forR5Cached().newJsonParser().encodeResourceToString(subscriptionStatus);
subscriptionStatus = myFhirContext.newJsonParser().parseResource(org.hl7.fhir.r4b.model.SubscriptionStatus.class, serializedSubscriptionStatus);
// WIP STR5 VersionConvertorFactory_43_50 when it supports SubscriptionStatus
// track here: https://github.com/hapifhir/org.hl7.fhir.core/issues/1212
// subscriptionStatus = (SubscriptionStatus) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionStatus) subscriptionStatus);
subscriptionStatus = VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r5.model.SubscriptionStatus) subscriptionStatus);
} else if (fhirVersion == FhirVersionEnum.R5) {
bundleBuilder.setType(Bundle.BundleType.SUBSCRIPTIONNOTIFICATION.toCode());
} else {

View File

@ -28,12 +28,12 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;
@ -48,7 +48,8 @@ import javax.annotation.Nonnull;
* Also validates criteria. If invalid, rejects the subscription without persisting the subscription.
*/
public class SubscriptionTopicRegisteringSubscriber implements MessageHandler {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicRegisteringSubscriber.class);
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
@Autowired
private FhirContext myFhirContext;
@Autowired
@ -106,7 +107,7 @@ public class SubscriptionTopicRegisteringSubscriber implements MessageHandler {
return;
}
SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalize(myFhirContext, payloadResource);
SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, payloadResource);
if (subscriptionTopic.getStatus() == Enumerations.PublicationStatus.ACTIVE) {
mySubscriptionTopicRegistry.register(subscriptionTopic);
} else {

View File

@ -30,14 +30,15 @@ import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.Logs;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SubscriptionTopicValidatingInterceptor {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicValidatingInterceptor.class);
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
private final FhirContext myFhirContext;
private final SubscriptionQueryValidator mySubscriptionQueryValidator;
@ -69,7 +70,7 @@ public class SubscriptionTopicValidatingInterceptor {
return;
}
SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalize(myFhirContext, theSubscription);
SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theSubscription);
boolean finished = false;
if (subscriptionTopic.getStatus() == null) {

View File

@ -24,17 +24,18 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
import ca.uhn.fhir.util.Logs;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Enumeration;
import org.hl7.fhir.r5.model.SubscriptionTopic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
public class SubscriptionTriggerMatcher {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTriggerMatcher.class);
private static final Logger ourLog = Logs.getSubscriptionTopicLog();
private final SubscriptionTopicSupport mySubscriptionTopicSupport;
private final BaseResourceMessage.OperationTypeEnum myOperation;
private final SubscriptionTopic.SubscriptionTopicResourceTriggerComponent myTrigger;

View File

@ -4,17 +4,19 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
import ca.uhn.fhir.rest.server.messaging.json.ResourceOperationJsonMessage;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.r4.model.codesystems.SubscriptionStatus;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ -34,6 +36,7 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -46,8 +49,8 @@ public class SubscriptionRegisteringSubscriberTest {
private FhirContext myFhirContext = FhirContext.forR4Cached();
@Mock
private SubscriptionRegistry mySubscriptionRegistry;
@Mock
private SubscriptionCanonicalizer mySubscriptionCanonicalizer;
@Spy
private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext);
@Mock
private DaoRegistry myDaoRegistry;
@Mock
@ -61,8 +64,15 @@ public class SubscriptionRegisteringSubscriberTest {
@BeforeEach
public void beforeEach() {
mySubscription = new Subscription();
mySubscription.setId("Subscription/testrest");
mySubscription = buildSubscription();
}
@NotNull
private static Subscription buildSubscription() {
Subscription subscription = new Subscription();
subscription.setId("Subscription/testrest");
subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
return subscription;
}
@Test
@ -79,7 +89,9 @@ public class SubscriptionRegisteringSubscriberTest {
ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
when(mySubscriptionDao.read(any(), any())).thenThrow(ResourceGoneException.class);
Subscription deletedSubscription = buildSubscription();
ResourceMetadataKeyEnum.DELETED_AT.put(deletedSubscription, InstantType.withCurrentTime());
when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(deletedSubscription);
mySubscriptionRegisteringSubscriber.handleMessage(message);
verify(mySubscriptionRegistry, times(1)).unregisterSubscriptionIfRegistered(any());
@ -92,7 +104,7 @@ public class SubscriptionRegisteringSubscriberTest {
ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
when(mySubscriptionDao.read(any(), any())).thenReturn(mySubscription);
when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(mySubscription);
when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -106,7 +118,7 @@ public class SubscriptionRegisteringSubscriberTest {
ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
when(mySubscriptionDao.read(any(), any())).thenReturn(mySubscription);
when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(mySubscription);
when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ERROR.toCode());
mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -126,7 +138,7 @@ public class SubscriptionRegisteringSubscriberTest {
ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture())).thenReturn(mySubscription);
when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture(), eq(true))).thenReturn(mySubscription);
when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -147,7 +159,7 @@ public class SubscriptionRegisteringSubscriberTest {
ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture())).thenReturn(mySubscription);
when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture(), eq(true))).thenReturn(mySubscription);
when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
mySubscriptionRegisteringSubscriber.handleMessage(message);

View File

@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.subscription.channel.subscription.IChannelNamer;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.module.config.MockFhirClientSearchParamProvider;
import ca.uhn.fhir.jpa.subscription.util.SubscriptionDebugLogInterceptor;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.system.HapiSystemProperties;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -35,6 +36,7 @@ import static org.mockito.Mockito.mock;
BaseSubscriptionTest.MyConfig.class
})
public abstract class BaseSubscriptionTest {
private static final SubscriptionDebugLogInterceptor ourSubscriptionDebugLogInterceptor = new SubscriptionDebugLogInterceptor();
static {
HapiSystemProperties.enableUnitTestMode();
@ -52,11 +54,13 @@ public abstract class BaseSubscriptionTest {
@BeforeEach
public void before() {
mySearchParamRegistry.handleInit(Collections.emptyList());
myInterceptorRegistry.registerInterceptor(ourSubscriptionDebugLogInterceptor);
}
@AfterEach
public void afterClearAnonymousLambdas() {
myInterceptorRegistry.unregisterAllInterceptors();
myInterceptorRegistry.unregisterInterceptor(ourSubscriptionDebugLogInterceptor);
}
public void initSearchParamRegistry(IBaseResource theReadResource) {
@ -68,7 +72,7 @@ public abstract class BaseSubscriptionTest {
public static class MyConfig {
@Bean
public JpaStorageSettings storageSettings() {
public JpaStorageSettings jpaStorageSettings() {
return new JpaStorageSettings();
}

View File

@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.subscription.module.config;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.match.matcher.matching.InMemorySubscriptionMatcher;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.springframework.context.annotation.Bean;
@ -25,11 +24,6 @@ public class TestSubscriptionConfig {
return new PartitionSettings();
}
@Bean
public StorageSettings storageSettings() {
return new StorageSettings();
}
@Bean
public IGenericClient fhirClient() {
return mock(IGenericClient.class);

View File

@ -11,7 +11,6 @@ import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings;
import ca.uhn.fhir.jpa.subscription.channel.subscription.ISubscriptionDeliveryChannelNamer;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscriptionChannelType;
@ -100,8 +99,6 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
@Autowired
IInterceptorService myInterceptorRegistry;
@Autowired
private SubscriptionLoader mySubscriptionLoader;
@Autowired
private ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer;
@BeforeEach
@ -131,6 +128,8 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
mySubscriptionMatchingPost.clear();
mySubscriptionActivatedPost.clear();
ourObservationListener.clear();
mySubscriptionResourceMatched.clear();
mySubscriptionResourceNotMatched.clear();
super.clearRegistry();
}
@ -148,9 +147,11 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
}
protected Subscription sendSubscription(Subscription theSubscription, RequestPartitionId theRequestPartitionId, Boolean mockDao) throws InterruptedException {
mySubscriptionResourceNotMatched.setExpectedCount(1);
mySubscriptionActivatedPost.setExpectedCount(1);
Subscription retVal = sendResource(theSubscription, theRequestPartitionId);
mySubscriptionActivatedPost.awaitExpected();
mySubscriptionResourceNotMatched.awaitExpected();
return retVal;
}

View File

@ -35,7 +35,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(2, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(1);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());
@ -58,7 +60,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(2, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(1);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());
@ -82,7 +86,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
mySubscriptionAfterDelivery.setExpectedCount(1);
ourObservationListener.setExpectedCount(0);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.clear();
mySubscriptionAfterDelivery.awaitExpected();
@ -120,7 +126,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
observation.setStatus(Observation.ObservationStatus.FINAL);
mySubscriptionResourceMatched.setExpectedCount(1);
sendResource(observation);
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());

View File

@ -76,7 +76,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(2, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(1);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());
@ -99,7 +101,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(2, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(1);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());
@ -117,7 +121,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(1, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(1);
mySubscriptionResourceMatched.setExpectedCount(1);
sendResource(observation);
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(1, ourContentTypes.size());
@ -141,7 +147,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
mySubscriptionAfterDelivery.setExpectedCount(1);
ourObservationListener.setExpectedCount(0);
mySubscriptionResourceMatched.setExpectedCount(1);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.clear();
mySubscriptionAfterDelivery.awaitExpected();
@ -168,7 +176,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
assertEquals(3, mySubscriptionRegistry.size());
ourObservationListener.setExpectedCount(2);
mySubscriptionResourceMatched.setExpectedCount(2);
sendObservation(code, "SNOMED-CT");
mySubscriptionResourceMatched.awaitExpected();
ourObservationListener.awaitExpected();
assertEquals(2, ourContentTypes.size());
@ -401,7 +411,7 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
Subscription modifiedSubscription = subscription.copy();
// the original partition info was the request info, but we need the actual storage partition.
modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any(), eq(true))).thenReturn(modifiedSubscription);
}
@Nested

View File

@ -0,0 +1,20 @@
package ca.uhn.fhir.jpa.topic;
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4b.model.Enumerations;
import org.hl7.fhir.r4b.model.SubscriptionTopic;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class SubscriptionTopicCanonicalizerTest {

	/**
	 * Verifies that canonicalizing an R4B SubscriptionTopic into the R5 model
	 * preserves the resource id and the publication status.
	 */
	@Test
	public void testCanonicalizeTopic() {
		// Given: an R4B topic with a known id and an ACTIVE status
		SubscriptionTopic r4bTopic = new SubscriptionTopic();
		r4bTopic.setId("123");
		r4bTopic.setStatus(Enumerations.PublicationStatus.ACTIVE);

		// When: the topic is converted to its R5 canonical form
		org.hl7.fhir.r5.model.SubscriptionTopic r5Topic =
			SubscriptionTopicCanonicalizer.canonicalizeTopic(FhirContext.forR4BCached(), r4bTopic);

		// Then: id and status survive the conversion unchanged
		assertEquals("123", r5Topic.getId());
		assertEquals(org.hl7.fhir.r5.model.Enumerations.PublicationStatus.ACTIVE, r5Topic.getStatus());
	}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.7.0-SNAPSHOT</version>
<version>6.7.1-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -104,7 +104,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@ -693,7 +692,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
IBundleProvider history = myPatientDao.history(null, null, null, mySrd);
assertEquals(4 + initialHistory, history.sizeOrThrowNpe());
List<IBaseResource> resources = history.getResources(0, 4);
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) resources.get(0)));
assertTrue(resources.get(0).isDeleted());
try {
myPatientDao.delete(id2, mySrd);
@ -796,10 +795,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
IBundleProvider history = myPatientDao.history(id, null, null, null, mySrd);
assertEquals(2, history.size().intValue());
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)));
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)).getValue());
assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(1, 2).get(0)));
assertTrue(history.getResources(0, 1).get(0).isDeleted());
assertFalse(history.getResources(1, 2).get(0).isDeleted());
}
@Test
@ -1206,13 +1203,13 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
assertEquals(id.withVersion("2"), entries.get(1).getIdElement());
assertEquals(id.withVersion("1"), entries.get(2).getIdElement());
assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(0)));
assertFalse(entries.get(0).isDeleted());
assertEquals(BundleEntryTransactionMethodEnum.PUT, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(0)));
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(1)));
assertTrue(entries.get(1).isDeleted());
assertEquals(BundleEntryTransactionMethodEnum.DELETE, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(1)));
assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(2)));
assertFalse(entries.get(2).isDeleted());
assertEquals(BundleEntryTransactionMethodEnum.POST, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(2)));
}

View File

@ -5,7 +5,6 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
@ -63,6 +62,7 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
@ -739,10 +739,8 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
IBundleProvider history = myPatientDao.history(id, null, null, null, mySrd);
assertEquals(2, history.size().intValue());
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)));
assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)).getValue());
assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(1, 2).get(0)));
assertTrue(history.getResources(0, 1).get(0).isDeleted());
assertFalse(history.getResources(1, 2).get(0).isDeleted());
}
@Test

Some files were not shown because too many files have changed in this diff Show More