Mirror of https://github.com/hapifhir/hapi-fhir.git (synced 2025-02-08 05:58:27 +00:00)

Commit c2a8fb1946: Merge branch 'master' of https://github.com/hapifhir/hapi-fhir
@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -23,8 +23,6 @@ import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Multimaps;
 import org.apache.commons.lang3.Validate;
-import org.apache.commons.lang3.builder.ToStringBuilder;
-import org.apache.commons.lang3.builder.ToStringStyle;

 import javax.annotation.Nonnull;
 import java.util.Collection;

@@ -136,8 +134,14 @@ public class HookParams {

     @Override
     public String toString() {
-        return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE)
-            .append("params", myParams)
-            .toString();
+        StringBuilder b = new StringBuilder();
+        myParams.forEach((key, value) -> {
+            b.append(" ")
+                .append(key.getSimpleName())
+                .append(": ")
+                .append(value)
+                .append("\n");
+        });
+        return b.toString();
     }
 }
@@ -30,6 +30,8 @@ import ca.uhn.fhir.util.DateUtils;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;

+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;

@@ -355,17 +357,21 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
         if (myLowerBound == null || myLowerBound.getValue() == null) {
             return null;
         }
-        Date retVal = myLowerBound.getValue();
+        return getLowerBoundAsInstant(myLowerBound);
+    }
+
+    @Nonnull
+    private static Date getLowerBoundAsInstant(@Nonnull DateParam theLowerBound) {
+        Date retVal = theLowerBound.getValue();
-        if (myLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
+        if (theLowerBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
             retVal = DateUtils.getLowestInstantFromDate(retVal);
         }

-        if (myLowerBound.getPrefix() != null) {
-            switch (myLowerBound.getPrefix()) {
+        if (theLowerBound.getPrefix() != null) {
+            switch (theLowerBound.getPrefix()) {
                 case GREATERTHAN:
                 case STARTS_AFTER:
-                    retVal = myLowerBound.getPrecision().add(retVal, 1);
+                    retVal = theLowerBound.getPrecision().add(retVal, 1);
                     break;
                 case EQUAL:
                 case NOT_EQUAL:

@@ -375,7 +381,7 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
                 case APPROXIMATE:
                 case LESSTHAN_OR_EQUALS:
                 case ENDS_BEFORE:
-                    throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + myLowerBound.getPrefix());
+                    throw new IllegalStateException(Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix());
             }
         }
         return retVal;
@@ -417,14 +423,19 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
             return null;
         }

-        Date retVal = myUpperBound.getValue();
+        return getUpperBoundAsInstant(myUpperBound);
+    }
+
+    @Nonnull
+    private static Date getUpperBoundAsInstant(@Nonnull DateParam theUpperBound) {
+        Date retVal = theUpperBound.getValue();
+
-        if (myUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
+        if (theUpperBound.getPrecision().ordinal() <= TemporalPrecisionEnum.DAY.ordinal()) {
             retVal = DateUtils.getHighestInstantFromDate(retVal);
         }

-        if (myUpperBound.getPrefix() != null) {
-            switch (myUpperBound.getPrefix()) {
+        if (theUpperBound.getPrefix() != null) {
+            switch (theUpperBound.getPrefix()) {
                 case LESSTHAN:
                 case ENDS_BEFORE:
                     retVal = new Date(retVal.getTime() - 1L);

@@ -432,14 +443,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
                 case EQUAL:
                 case NOT_EQUAL:
                 case LESSTHAN_OR_EQUALS:
-                    retVal = myUpperBound.getPrecision().add(retVal, 1);
+                    retVal = theUpperBound.getPrecision().add(retVal, 1);
                     retVal = new Date(retVal.getTime() - 1L);
                     break;
                 case GREATERTHAN_OR_EQUALS:
                 case GREATERTHAN:
                 case APPROXIMATE:
                 case STARTS_AFTER:
-                    throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + myUpperBound.getPrefix());
+                    throw new IllegalStateException(Msg.code(1929) + "Invalid upper bound comparator: " + theUpperBound.getPrefix());
             }
         }
         return retVal;
@@ -626,12 +637,14 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
      * are the same value. As such, even though the prefixes for the lower and
      * upper bounds default to <code>ge</code> and <code>le</code> respectively,
      * the resulting prefix is effectively <code>eq</code> where only a single
-     * date is provided - as required by the FHIR specificiation (i.e. "If no
+     * date is provided - as required by the FHIR specification (i.e. "If no
      * prefix is present, the prefix <code>eq</code> is assumed").
      */
     private void validateAndSet(DateParam lowerBound, DateParam upperBound) {
         if (hasBound(lowerBound) && hasBound(upperBound)) {
-            if (lowerBound.getValue().getTime() > upperBound.getValue().getTime()) {
+            Date lowerBoundAsInstant = getLowerBoundAsInstant(lowerBound);
+            Date upperBoundAsInstant = getUpperBoundAsInstant(upperBound);
+            if (lowerBoundAsInstant.after(upperBoundAsInstant)) {
                 throw new DataFormatException(Msg.code(1932) + format(
                     "Lower bound of %s is after upper bound of %s",
                     lowerBound.getValueAsString(), upperBound.getValueAsString()));
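The hunks above make DateRangeParam's bound handling instant-aware: validateAndSet() now compares the resolved lower and upper instants rather than the raw values. A minimal sketch of the behaviour this relies on, using only APIs visible in this diff; the comments about exact instants are illustrative:

import java.util.Date;

import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;

class SameDayRangeSketch {
    static void sameDayRangeStaysValid() {
        // ge2023-05-06 & le2023-05-06: the same literal value on both bounds
        DateRangeParam range = new DateRangeParam(
            new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2023-05-06"),
            new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, "2023-05-06"));

        Date lower = range.getLowerBoundAsInstant(); // first instant of the day
        Date upper = range.getUpperBoundAsInstant(); // last instant of the day

        // Comparing instants (not raw values) keeps this single-day range valid
        assert !lower.after(upper);
    }
}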
@@ -249,6 +249,22 @@ public class BundleBuilder {
         return new CreateBuilder(request);
     }

+    /**
+     * Adds an entry containing a delete (DELETE) request.
+     * Also sets the Bundle.type value to "transaction" if it is not already set.
+     * <p>
+     * Note that the resource is only used to extract its ID and type, and the body of the resource is not included in the entry.
+     *
+     * @param theCondition The conditional URL, e.g. "Patient?identifier=foo|bar"
+     * @since 6.8.0
+     */
+    public DeleteBuilder addTransactionDeleteConditionalEntry(String theCondition) {
+        Validate.notBlank(theCondition, "theCondition must not be blank");
+
+        setBundleField("type", "transaction");
+        return addDeleteEntry(theCondition);
+    }
+
     /**
      * Adds an entry containing a delete (DELETE) request.
      * Also sets the Bundle.type value to "transaction" if it is not already set.
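A brief usage sketch for the new conditional-delete entry; the R4 context and the conditional URL are illustrative assumptions, not part of the change:

import org.hl7.fhir.instance.model.api.IBaseBundle;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;

class ConditionalDeleteEntrySketch {
    static IBaseBundle buildDeleteTransaction() {
        FhirContext ctx = FhirContext.forR4();
        BundleBuilder builder = new BundleBuilder(ctx);

        // Adds a DELETE entry whose request URL is the conditional URL itself
        builder.addTransactionDeleteConditionalEntry("Patient?identifier=foo|bar");

        return (IBaseBundle) builder.getBundle();
    }
}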
@@ -29,6 +29,7 @@ public class DateRangeUtil {

     /**
      * Narrow the DateRange to be within theStartInclusive, and theEndExclusive, if provided.
+     *
      * @param theDateRangeParam the initial range, null for unconstrained
      * @param theStartInclusive a lower bound to apply, or null for unchanged.
      * @param theEndExclusive an upper bound to apply, or null for unchanged.

@@ -39,16 +40,23 @@ public class DateRangeUtil {
         if (theStartInclusive == null && theEndExclusive == null) {
             return theDateRangeParam;
         }
-        DateRangeParam result = theDateRangeParam==null?new DateRangeParam():new DateRangeParam(theDateRangeParam);
+        DateRangeParam result = theDateRangeParam == null ? new DateRangeParam() : new DateRangeParam(theDateRangeParam);
+
-        if (theStartInclusive != null) {
+        Date startInclusive = theStartInclusive;
+        if (startInclusive != null) {
             Date inputStart = result.getLowerBoundAsInstant();
-            if (theDateRangeParam == null || inputStart == null || inputStart.before(theStartInclusive)) {
-                result.setLowerBoundInclusive(theStartInclusive);
+
+            Date upperBound = result.getUpperBoundAsInstant();
+            if (upperBound != null && upperBound.before(startInclusive)) {
+                startInclusive = upperBound;
+            }
+
+            if (theDateRangeParam == null || inputStart == null || inputStart.before(startInclusive)) {
+                result.setLowerBoundInclusive(startInclusive);
             }
         }
         if (theEndExclusive != null) {
-            Date inputEnd = result.getUpperBound() == null? null : result.getUpperBound().getValue();
+            Date inputEnd = result.getUpperBound() == null ? null : result.getUpperBound().getValue();
             if (theDateRangeParam == null || inputEnd == null || inputEnd.after(theEndExclusive)) {
                 result.setUpperBoundExclusive(theEndExclusive);
             }
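A hedged sketch of the narrowing behaviour changed above. The method name narrowDateRange and the ca.uhn.fhir.util package are assumed from the surrounding test code; the instants mirror the new test case further down in this diff:

import java.time.Instant;
import java.util.Date;

import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.util.DateRangeUtil;

class NarrowRangeSketch {
    static DateRangeParam narrowOpenStart() {
        // lt2023-05-06 with day-level precision and no lower bound
        DateRangeParam range = new DateRangeParam(new DateParam(ParamPrefixEnum.LESSTHAN, "2023-05-06"));

        // Narrow to [2023-05-06T10:00:20.512Z, 2023-05-10T00:00:00Z); the new guard also
        // keeps a requested start from being pushed past an existing upper bound.
        return DateRangeUtil.narrowDateRange(range,
            Date.from(Instant.parse("2023-05-06T10:00:20.512Z")),
            Date.from(Instant.parse("2023-05-10T00:00:00Z")));
    }
}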
@@ -28,6 +28,8 @@ public class Logs {

     private static final Logger ourSubscriptionTroubleshootingLog = LoggerFactory.getLogger("ca.cdr.log.subscription_troubleshooting");

+    private static final Logger ourSubscriptionTopicLog = LoggerFactory.getLogger("ca.uhn.fhir.log.subscription_topic_troubleshooting");
+
     public static Logger getBatchTroubleshootingLog() {
         return ourBatchTroubleshootingLog;
     }

@@ -39,4 +41,8 @@ public class Logs {
     public static Logger getSubscriptionTroubleshootingLog() {
         return ourSubscriptionTroubleshootingLog;
     }
+
+    public static Logger getSubscriptionTopicLog() {
+        return ourSubscriptionTopicLog;
+    }
 }
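A minimal sketch of how the new troubleshooting channel might be obtained; the ca.uhn.fhir.util.Logs package and the caller class are assumptions for illustration:

import org.slf4j.Logger;

import ca.uhn.fhir.util.Logs;

class SubscriptionTopicLoggingSketch {
    private static final Logger ourTopicLog = Logs.getSubscriptionTopicLog();

    void onTopicMatched(String theTopicId, String theResourceId) {
        // Routed to the dedicated "ca.uhn.fhir.log.subscription_topic_troubleshooting" channel
        ourTopicLog.debug("Subscription topic {} matched resource {}", theTopicId, theResourceId);
    }
}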
@@ -129,6 +129,15 @@ public class ParametersUtil {
         return getParameterPartValue(theCtx, theParameter, theParameterName).map(t -> (IPrimitiveType<?>) t).map(t -> t.getValueAsString()).orElse(null);
     }

+    public static Optional<Integer> getParameterPartValueAsInteger(FhirContext theCtx, IBase theParameter, String theParameterName) {
+        return getParameterPartValue(theCtx, theParameter, theParameterName)
+            .filter(t -> IPrimitiveType.class.isAssignableFrom(t.getClass()))
+            .map(t -> (IPrimitiveType<?>) t)
+            .map(IPrimitiveType::getValue)
+            .filter(t -> Integer.class.isAssignableFrom(t.getClass()))
+            .map(t -> (Integer) t);
+    }
+
     private static <T> List<T> extractNamedParameters(FhirContext theCtx, IBaseParameters theParameters, String theParameterName, Function<IPrimitiveType<?>, T> theMapper) {
         List<T> retVal = new ArrayList<>();
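A small usage sketch for the new helper. The surrounding class and the "count" part name are hypothetical; theParameterRep stands for one repetition of Parameters.parameter:

import java.util.Optional;

import org.hl7.fhir.instance.model.api.IBase;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ParametersUtil;

class ParameterPartSketch {
    // Reads an integer-valued part named "count" from a single parameter repetition
    static int readCountOrZero(FhirContext theCtx, IBase theParameterRep) {
        Optional<Integer> count = ParametersUtil.getParameterPartValueAsInteger(theCtx, theParameterRep, "count");
        return count.orElse(0);
    }
}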
@@ -441,7 +441,10 @@ public final class TerserUtil {
     private static void replaceField(FhirTerser theTerser, IBaseResource theFrom, IBaseResource theTo, BaseRuntimeChildDefinition childDefinition) {
         List<IBase> fromValues = childDefinition.getAccessor().getValues(theFrom);
         List<IBase> toValues = childDefinition.getAccessor().getValues(theTo);
-        if (fromValues != toValues) {
+
+        if (fromValues.isEmpty() && !toValues.isEmpty()) {
+            childDefinition.getMutator().setValue(theTo, null);
+        } else if (fromValues != toValues) {
             clear(toValues);

             mergeFields(theTerser, theTo, childDefinition, fromValues, toValues);

@@ -529,7 +532,6 @@ public final class TerserUtil {
      * Creates a new element taking into consideration elements with choice that are not directly retrievable by element
      * name
      *
-     *
      * @param theFhirTerser
      * @param theChildDefinition Child to create a new instance for
      * @param theFromFieldValue The base parent field
@@ -19,15 +19,16 @@
  */
 package org.hl7.fhir.instance.model.api;

+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.model.api.IElement;
+import ca.uhn.fhir.model.api.Include;
+import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;

-import ca.uhn.fhir.context.FhirVersionEnum;
-import ca.uhn.fhir.model.api.IElement;
-import ca.uhn.fhir.model.api.Include;

 /**
  * For now, this is a simple marker interface indicating that a class is a resource type.
  * There are two concrete types of implementations of this interrface. The first are

@@ -58,4 +59,10 @@ public interface IBaseResource extends IBase, IElement {

     FhirVersionEnum getStructureFhirVersionEnum();
+
+    /**
+     * @return <code>true</code> if this resource has been deleted
+     */
+    default boolean isDeleted() {
+        return ResourceMetadataKeyEnum.DELETED_AT.get(this) != null;
+    }
 }
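The new default method lets callers replace manual ResourceMetadataKeyEnum.DELETED_AT lookups, which is exactly what the BaseHapiFhirDao hunk later in this diff does. A minimal sketch:

import org.hl7.fhir.instance.model.api.IBaseResource;

class DeletionCheckSketch {
    static boolean wasDeleted(IBaseResource theOldResource) {
        // Replaces instanceof checks plus ResourceMetadataKeyEnum.DELETED_AT.get(...) != null
        return theOldResource != null && theOldResource.isDeleted();
    }
}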
@@ -5,6 +5,7 @@ import ca.uhn.fhir.rest.param.DateRangeParam;
 import ca.uhn.fhir.rest.param.ParamPrefixEnum;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
+import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;

@@ -59,6 +60,11 @@ class DateRangeUtilTest {
             new DateParam(theResultStartPrefix, theResultStart), new DateParam(theResultEndPrefix, theResultEnd));
         }
+
+        static NarrowCase from(String theMessage, DateRangeParam theRange, Date theNarrowStart, Date theNarrowEnd,
+                               DateParam theResultStart, DateParam theResultEnd) {
+            return new NarrowCase(theMessage, theRange, theNarrowStart, theNarrowEnd, theResultStart, theResultEnd);
+        }

         @Override
         public String toString() {
             return new ToStringBuilder(this, ToStringStyle.SIMPLE_STYLE)

@@ -89,8 +95,23 @@ class DateRangeUtilTest {
             // half-open cases
             NarrowCase.from("end inside open end", new DateRangeParam(dateTwo, null), null, dateFour, dateTwo, dateFour),
             NarrowCase.from("start inside open start", new DateRangeParam(null, dateFour), dateTwo, null, GREATERTHAN_OR_EQUALS, dateTwo, LESSTHAN_OR_EQUALS, dateFour),
-            NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour)
+            NarrowCase.from("gt case preserved", new DateRangeParam(new DateParam(GREATERTHAN, dateTwo), null), null, dateFour, GREATERTHAN, dateTwo, LESSTHAN, dateFour),
+
+            NarrowCase.from("lt date level precision date, narrow from is inside date",
+                new DateRangeParam(new DateParam(LESSTHAN, "2023-05-06")),
+                Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
+                Date.from(Instant.parse("2023-05-10T00:00:00.000+00:00")),
+                new DateParam(GREATERTHAN_OR_EQUALS, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00"))),
+                new DateParam(LESSTHAN, "2023-05-06")
+            ),
+
+            NarrowCase.from("gt date level precision date, narrow to is inside date",
+                new DateRangeParam(new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06")),
+                Date.from(Instant.parse("2023-05-01T00:00:00.000+00:00")),
+                Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")),
+                new DateParam(GREATERTHAN_OR_EQUALS, "2023-05-06"),
+                new DateParam(LESSTHAN, Date.from(Instant.parse("2023-05-06T10:00:20.512+00:00")))
+            )

         );
     }
@@ -4,7 +4,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ca.uhn.hapi.fhir</groupId>
   <artifactId>hapi-fhir-bom</artifactId>
-  <version>6.7.0-SNAPSHOT</version>
+  <version>6.7.1-SNAPSHOT</version>

   <packaging>pom</packaging>
   <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -2,8 +2,8 @@ package ca.uhn.fhir.cli;

 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
-import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
 import ca.uhn.fhir.test.utilities.RestServerR4Helper;
+import ca.uhn.fhir.test.utilities.TlsAuthenticationTestHelper;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.DefaultParser;
 import org.apache.commons.cli.ParseException;

@@ -29,7 +29,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 class ExampleDataUploaderTest {

     @RegisterExtension
-    public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper();
+    public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newWithTransactionLatch();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -46,7 +46,8 @@ class ExampleDataUploaderTest {

     @ParameterizedTest
     @ValueSource(booleans = {true, false})
-    public void testHeaderPassthrough(boolean theIncludeTls) throws ParseException {
+    public void testHeaderPassthrough(boolean theIncludeTls) throws ParseException, InterruptedException {
+        // setup
         String headerKey = "test-header-key";
         String headerValue = "test header value";

@@ -60,8 +61,11 @@ class ExampleDataUploaderTest {
         );

         final CommandLine commandLine = new DefaultParser().parse(testedCommand.getOptions(), args, true);
-        testedCommand.run(commandLine);
+
+        // execute
+        myRestServerR4Helper.executeWithLatch(() -> runCommand(commandLine));
+
+        // validate
         assertNotNull(myCapturingInterceptor.getLastRequest());
         Map<String, List<String>> allHeaders = myCapturingInterceptor.getLastRequest().getAllHeaders();
         assertFalse(allHeaders.isEmpty());

@@ -78,6 +82,14 @@ class ExampleDataUploaderTest {
         assertEquals("EX3152", resource.getIdElement().getIdPart());
     }

+    private void runCommand(CommandLine commandLine) {
+        try {
+            testedCommand.run(commandLine);
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     private static class RequestCapturingExampleDataUploader extends ExampleDataUploader {
         private final CapturingInterceptor myCapturingInterceptor;
@@ -43,7 +43,7 @@ public class ExportConceptMapToCsvCommandDstu3Test {
     }

     @RegisterExtension
-    public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
+    public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -40,7 +40,7 @@ public class ExportConceptMapToCsvCommandR4Test {
     }

     @RegisterExtension
-    public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
+    public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -45,7 +45,7 @@ public class ImportCsvToConceptMapCommandDstu3Test {
     }

     @RegisterExtension
-    public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
+    public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -50,7 +50,7 @@ public class ImportCsvToConceptMapCommandR4Test {


     @RegisterExtension
-    public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
+    public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -39,7 +39,7 @@ class ReindexTerminologyCommandTest {
     private BaseJpaSystemProvider<?, ?> myProvider = spy(new BaseJpaSystemProvider<>() {});

     @RegisterExtension
-    public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
+    public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();

@@ -104,9 +104,9 @@ public class UploadTerminologyCommandTest {
     }

     @RegisterExtension
-    public final RestServerR4Helper myRestServerR4Helper = new RestServerR4Helper(true);
+    public final RestServerR4Helper myRestServerR4Helper = RestServerR4Helper.newInitialized();
     @RegisterExtension
-    public final RestServerDstu3Helper myRestServerDstu3Helper = new RestServerDstu3Helper(true);
+    public final RestServerDstu3Helper myRestServerDstu3Helper = RestServerDstu3Helper.newInitialized();
     @RegisterExtension
     public TlsAuthenticationTestHelper myTlsAuthenticationTestHelper = new TlsAuthenticationTestHelper();
@@ -6,7 +6,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir-cli</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-fhir</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR - Docs
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.hapi.fhir.docs;

 import ca.uhn.fhir.context.FhirContext;
@@ -1,4 +1,5 @@
 ---
 type: add
 issue: 4697
-title: "Add providers for the operations available in the Clinical Reasoning package."
+title: "Added DSTU3 and R4 support for the FHIR Clinical Reasoning module operations ActivityDefinition/$apply and PlanDefinition/$apply.
+  $apply allows for general workflow processing and is used in clinical decision support, prior authorization, quality reporting, and disease surveillance use cases."

@@ -0,0 +1,6 @@
+---
+type: add
+issue: 4697
+title: "Added DSTU3 and R4 support for the DaVinci Documentation Templates and Rules (DTR) Questionnaire/$questionnaire-package operation.
+  This operation allows a Questionnaire to be packaged as a Bundle with all the supporting resources that may be required for its use, such as ValueSets and Libraries.
+  This operation is used in the context of prior authorization."

@@ -0,0 +1,4 @@
+---
+type: add
+issue: 4697
+title: "Added R4 support for the Questionnaire/$prepopulate and PlanDefinition/$package operations. These operations are intended to support extended DaVinci DTR and SDC use cases."

@@ -0,0 +1,5 @@
+---
+type: add
+issue: 4697
+title: "Added DSTU3 and R4 support for the DaVinci Structured Data Capture (SDC) operations Questionnaire/$populate and QuestionnaireResponse/$extract.
+  These operations are used in data capture and exchange use cases, and are used by downstream specifications such as DaVinci Documentation Templates and Rules (DTR) for prior authorization."
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 4814
+title: "A recent regression prevented the SQL Migrator from running on Oracle. This has been
+  corrected."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 4812
+title: "The tag added to golden resources did not carry a version; a version is now added."

@@ -0,0 +1,8 @@
+---
+type: change
+issue: 4817
+title: "Introduce the IBaseResource.isDeleted() method and convert code to use it.
+  Add the subscription_topic_troubleshooting log.
+  No longer rely on ResourceGoneException to detect a deleted subscription; instead use the new isDeleted() method.
+  Demote unexpected exceptions in HapiTransactionService from error to debug, since these exceptions are expected,
+  e.g. when checking whether a resource has been deleted by catching a ResourceGoneException."
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 4831
+title: "When performing a FHIR transaction containing both a conditional delete as well as a
+  conditional create/update for the same resource, the resource was left in an inconsistent
+  state. This has been corrected. Thanks to Laxman Singh for raising this issue."
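For illustration only, a transaction of the shape this fix describes could be assembled with BundleBuilder, reusing the conditional-delete helper added earlier in this diff; the Patient resource, the conditional URL, and the CreateBuilder.conditional() call are assumptions here, not something this changelog entry prescribes:

import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.r4.model.Patient;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;

class ConditionalDeleteThenCreateSketch {
    static IBaseBundle build(FhirContext theR4Context, Patient thePatient) {
        BundleBuilder builder = new BundleBuilder(theR4Context);
        // Conditional delete and conditional create against the same URL, in one transaction
        builder.addTransactionDeleteConditionalEntry("Patient?identifier=foo|bar");
        builder.addTransactionCreateEntry(thePatient).conditional("Patient?identifier=foo|bar");
        return (IBaseBundle) builder.getBundle();
    }
}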
@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 4831
+title: "Conditional deletes that delete multiple resources at once have been optimized to perform
+  fewer SQL select statements, which should improve performance on large deletes."

@@ -0,0 +1,8 @@
+---
+type: fix
+issue: 4838
+title: "Two failures in the $delete-expunge operation were fixed:
+  <ul>
+  <li>Jobs could fail if hibernate search was loaded but not enabled.</li>
+  <li>Jobs could fail if the criteria included a <code>_lastUpdated=lt[date]</code> clause</li>
+  </ul>"
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 4838
+title: "The BALP AsyncMemoryQueueBackedFhirClientBalpSink incorrectly used a non-blocking method
+  to add events to the blocking queue, resulting in race conditions on a heavily loaded
+  server."

@@ -0,0 +1,7 @@
+---
+type: add
+issue: 4838
+title: "When performing a resource reindex on a deleted resource, any search index rows will now
+  be deleted. Deleting a resource should generally not leave any such rows behind, but they can
+  be left if the resource is manually deleted using SQL directly against the database, and in this
+  case the reindex job will now clean up these unwanted rows."

@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 4838
+title: "When reindexing resources, deleted resources could incorrectly fail validation rules and
+  cause the reindex job to not complete correctly. This has been corrected."

@@ -0,0 +1,6 @@
+---
+type: add
+issue: 4838
+title: "When invoking the instance level `$reindex` and `$reindex-dryrun` operations, the resulting
+  status message and any warnings are now included in the response Parameters object as well as in
+  the generated response HTML narrative."

@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 4842
+title: "There was a bug with TerserUtil where it would not overwrite non-empty values with empty values from a source resource. This has been corrected. Thanks to @nigtrifork for the fix!"

@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 4881
+title: "When _Index Contained References_ is enabled in the JPA server, Bundle resources could not be
+  stored or indexed due to an incompatibility with the default Bundle search parameters. This has been
+  corrected."
@@ -0,0 +1,8 @@
+---
+- item:
+  type: "add"
+  title: "The versions of a few dependencies have been bumped to the latest versions
+    (dependent HAPI modules listed in brackets):
+    <ul>
+    <li>Hibernate ORM (JPA): 5.6.12.Final -> 5.6.15.Final</li>
+    </ul>"
@@ -11,7 +11,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ca.uhn.hapi.fhir</groupId>
     <artifactId>hapi-deployable-pom</artifactId>
-    <version>6.7.0-SNAPSHOT</version>
+    <version>6.7.1-SNAPSHOT</version>

     <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
   </parent>
@@ -166,7 +166,6 @@ import java.util.stream.Collectors;

 import static java.util.Objects.isNull;
 import static java.util.Objects.nonNull;
-import static org.apache.commons.lang3.BooleanUtils.isFalse;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 import static org.apache.commons.lang3.StringUtils.left;

@@ -537,20 +536,16 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora


     void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
-        String newVersion;
-        long newVersionLong;
         if (theResourceId == null || theResourceId.getVersionIdPart() == null) {
-            newVersion = "1";
-            newVersionLong = 1;
+            theSavedEntity.initializeVersion();
         } else {
-            newVersionLong = theResourceId.getVersionIdPartAsLong() + 1;
-            newVersion = Long.toString(newVersionLong);
+            theSavedEntity.markVersionUpdatedInCurrentTransaction();
         }

         assert theResourceId != null;
+        String newVersion = Long.toString(theSavedEntity.getVersion());
         IIdType newId = theResourceId.withVersion(newVersion);
         theResource.getIdElement().setValue(newId.getValue());
-        theSavedEntity.setVersion(newVersionLong);
     }

     public boolean isLogicalReference(IIdType theId) {
@@ -966,7 +961,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
          * This should be the very first thing..
          */
         if (theResource != null) {
-            if (thePerformIndexing) {
+            if (thePerformIndexing && theDeletedTimestampOrNull == null) {
                 if (!ourValidationDisabledForUnitTest) {
                     validateResourceForStorage((T) theResource, entity);
                 }

@@ -1062,7 +1057,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             verifyMatchUrlForConditionalCreate(theResource, entity.getCreatedByMatchUrl(), newParams, theRequest);
         }

+        if (CURRENTLY_REINDEXING.get(theResource) != Boolean.TRUE) {
             entity.setUpdated(theTransactionDetails.getTransactionDate());
+        }
         newParams.populateResourceTableSearchParamsPresentFlags(entity);
         entity.setIndexStatus(INDEX_STATUS_INDEXED);
     }
@@ -1091,9 +1088,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
             return entity;
         }

-        if (theUpdateVersion) {
-            long newVersion = entity.getVersion() + 1;
-            entity.setVersion(newVersion);
+        if (entity.getId() != null && theUpdateVersion) {
+            entity.markVersionUpdatedInCurrentTransaction();
         }

         /*

@@ -1157,6 +1153,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         if (thePerformIndexing) {
             if (newParams == null) {
                 myExpungeService.deleteAllSearchParams(JpaPid.fromId(entity.getId()));
+                entity.clearAllParamsPopulated();
             } else {

                 // Synchronize search param indexes
@@ -1296,6 +1293,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

     private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
         boolean versionedTags = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED;
+
         final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
         historyEntry.setEncoding(theChanged.getEncoding());
         historyEntry.setResource(theChanged.getResourceBinary());

@@ -1412,11 +1410,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
         myJpaStorageResourceParser.populateResourceMetadata(entity, false, tagList, version, theResource);

         boolean wasDeleted = false;
-        // NB If this if-else ever gets collapsed, make sure to account for possible null (will happen in mass-ingestion mode)
-        if (theOldResource instanceof IResource) {
-            wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) theOldResource) != null;
-        } else if (theOldResource instanceof IAnyResource) {
-            wasDeleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) theOldResource) != null;
+        if (theOldResource != null) {
+            wasDeleted = theOldResource.isDeleted();
         }

         DaoMethodOutcome outcome = toMethodOutcome(theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType).setCreated(wasDeleted);
@@ -35,6 +35,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
 import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;

@@ -199,9 +200,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
     private TransactionTemplate myTxTemplate;
     @Autowired
     private UrlPartitioner myUrlPartitioner;

     @Autowired
     private ResourceSearchUrlSvc myResourceSearchUrlSvc;
+    @Autowired
+    private IFhirSystemDao<?, ?> mySystemDao;

     public static <T extends IBaseResource> T invokeStoragePreShowResources(IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, T retVal) {
         if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, theInterceptorBroadcaster, theRequest)) {
@@ -263,12 +265,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
      */
     @Override
     public DaoMethodOutcome create(final T theResource) {
-        return create(theResource, null, true, new TransactionDetails(), null);
+        return create(theResource, null, true, null, new TransactionDetails());
     }

     @Override
     public DaoMethodOutcome create(final T theResource, RequestDetails theRequestDetails) {
-        return create(theResource, null, true, new TransactionDetails(), theRequestDetails);
+        return create(theResource, null, true, theRequestDetails, new TransactionDetails());
     }

     /**

@@ -281,11 +283,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

     @Override
     public DaoMethodOutcome create(final T theResource, String theIfNoneExist, RequestDetails theRequestDetails) {
-        return create(theResource, theIfNoneExist, true, new TransactionDetails(), theRequestDetails);
+        return create(theResource, theIfNoneExist, true, theRequestDetails, new TransactionDetails());
     }

     @Override
-    public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
+    public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
         RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
         return myTransactionService
             .withRequest(theRequestDetails)
@@ -340,7 +342,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
         entity.setResourceType(toResourceName(theResource));
         entity.setPartitionId(PartitionablePartitionId.toStoragePartition(theRequestPartitionId, myPartitionSettings));
         entity.setCreatedByMatchUrl(theMatchUrl);
-        entity.setVersion(1);
+        entity.initializeVersion();

         if (isNotBlank(theMatchUrl) && theProcessMatchUrl) {
             Set<JpaPid> match = myMatchResourceUrlService.processMatchUrl(theMatchUrl, myResourceType, theTransactionDetails, theRequest);
@ -348,19 +350,51 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size());
|
String msg = getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theMatchUrl, match.size());
|
||||||
throw new PreconditionFailedException(Msg.code(958) + msg);
|
throw new PreconditionFailedException(Msg.code(958) + msg);
|
||||||
} else if (match.size() == 1) {
|
} else if (match.size() == 1) {
|
||||||
JpaPid pid = match.iterator().next();
|
|
||||||
|
|
||||||
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> {
|
/*
|
||||||
return myTxTemplate.execute(tx -> {
|
* Ok, so we've found a single PID that matches the conditional URL.
|
||||||
|
* That's good, there are two possibilities below.
|
||||||
|
*/
|
||||||
|
|
||||||
|
JpaPid pid = match.iterator().next();
|
||||||
|
if (theTransactionDetails.getDeletedResourceIds().contains(pid)) {
|
||||||
|
|
||||||
|
/*
|
||||||
|
* If the resource matching the given match URL has already been
|
||||||
|
* deleted within this transaction. This is a really rare case, since
|
||||||
|
* it means the client has performed a FHIR transaction with both
|
||||||
|
* a delete and a create on the same conditional URL. This is rare
|
||||||
|
* but allowed, and means that it's now ok to create a new one resource
|
||||||
|
* matching the conditional URL since we'll be deleting any existing
|
||||||
|
* index rows on the existing resource as a part of this transaction.
|
||||||
|
* We can also un-resolve the previous match URL in the TransactionDetails
|
||||||
|
* since we'll resolve it to the new resource ID below
|
||||||
|
*/
|
||||||
|
|
||||||
|
myMatchResourceUrlService.unresolveMatchUrl(theTransactionDetails, getResourceName(), theMatchUrl);
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
/*
|
||||||
|
* This is the normal path where the conditional URL matched exactly
|
||||||
|
* one resource, so we won't be creating anything but instead
|
||||||
|
* just returning the existing ID. We now have a PID for the matching
|
||||||
|
* resource, but we haven't loaded anything else (e.g. the forced ID
|
||||||
|
* or the resource body aren't yet loaded from the DB). We're going to
|
||||||
|
* return a LazyDaoOutcome with two lazy loaded providers for loading the
|
||||||
|
* entity and the forced ID since we can avoid these extra SQL loads
|
||||||
|
* unless we know we're actually going to use them. For example, if
|
||||||
|
* the client has specified "Prefer: return=minimal" then we won't be
|
||||||
|
* needing the load the body.
|
||||||
|
*/
|
||||||
|
|
||||||
|
Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> myTxTemplate.execute(tx -> {
|
||||||
ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
|
ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
|
||||||
IBaseResource resource = myJpaStorageResourceParser.toResource(foundEntity, false);
|
IBaseResource resource = myJpaStorageResourceParser.toResource(foundEntity, false);
|
||||||
theResource.setId(resource.getIdElement().getValue());
|
theResource.setId(resource.getIdElement().getValue());
|
||||||
return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
|
return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
|
||||||
});
|
});
|
||||||
};
|
Supplier<IIdType> idSupplier = () -> myTxTemplate.execute(tx -> {
|
||||||
|
|
||||||
Supplier<IIdType> idSupplier = () -> {
|
|
||||||
return myTxTemplate.execute(tx -> {
|
|
||||||
IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid);
|
IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid);
|
||||||
if (!retVal.hasVersionIdPart()) {
|
if (!retVal.hasVersionIdPart()) {
|
||||||
Long version = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId());
|
Long version = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, pid.getId());
|
||||||
@ -376,7 +410,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
}
|
}
|
||||||
return retVal;
|
return retVal;
|
||||||
});
|
});
|
||||||
};
|
|
||||||
|
|
||||||
DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
|
DaoMethodOutcome outcome = toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
|
||||||
StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
|
StorageResponseCodeEnum responseCode = StorageResponseCodeEnum.SUCCESSFUL_CREATE_WITH_CONDITIONAL_MATCH;
|
||||||
@ -385,6 +418,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
return outcome;
|
return outcome;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
|
String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
|
||||||
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
|
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
|
||||||
@ -617,12 +651,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
throw new ResourceVersionConflictException(Msg.code(961) + "Trying to delete " + theId + " but this is not the current version");
|
throw new ResourceVersionConflictException(Msg.code(961) + "Trying to delete " + theId + " but this is not the current version");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
JpaPid persistentId = JpaPid.fromId(entity.getResourceId());
|
||||||
|
theTransactionDetails.addDeletedResourceId(persistentId);
|
||||||
|
|
||||||
// Don't delete again if it's already deleted
|
// Don't delete again if it's already deleted
|
||||||
if (isDeleted(entity)) {
|
if (isDeleted(entity)) {
|
||||||
DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED);
|
DaoMethodOutcome outcome = createMethodOutcomeForResourceId(entity.getIdDt().getValue(), MESSAGE_KEY_DELETE_RESOURCE_ALREADY_DELETED, StorageResponseCodeEnum.SUCCESSFUL_DELETE_ALREADY_DELETED);
|
||||||
|
|
||||||
// used to exist, so we'll set the persistent id
|
// used to exist, so we'll set the persistent id
|
||||||
outcome.setPersistentId(JpaPid.fromId(entity.getResourceId()));
|
outcome.setPersistentId(persistentId);
|
||||||
outcome.setEntity(entity);
|
outcome.setEntity(entity);
|
||||||
|
|
||||||
return outcome;
|
return outcome;
|
||||||
@ -681,7 +718,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
|
|
||||||
return myTransactionService.execute(theRequest, transactionDetails, tx -> {
|
return myTransactionService.execute(theRequest, transactionDetails, tx -> {
|
||||||
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
||||||
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest);
|
DeleteMethodOutcome outcome = deleteByUrl(theUrl, deleteConflicts, theRequest, transactionDetails);
|
||||||
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
|
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
|
||||||
return outcome;
|
return outcome;
|
||||||
});
|
});
|
||||||
@ -692,20 +729,19 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
* transaction processors
|
* transaction processors
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequestDetails) {
|
public DeleteMethodOutcome deleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
|
||||||
validateDeleteEnabled();
|
validateDeleteEnabled();
|
||||||
TransactionDetails transactionDetails = new TransactionDetails();
|
|
||||||
|
|
||||||
return myTransactionService.execute(theRequestDetails, transactionDetails, tx -> doDeleteByUrl(theUrl, deleteConflicts, theRequestDetails));
|
return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doDeleteByUrl(theUrl, deleteConflicts, theTransactionDetails, theRequestDetails));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, RequestDetails theRequest) {
|
private DeleteMethodOutcome doDeleteByUrl(String theUrl, DeleteConflictList deleteConflicts, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
|
||||||
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
|
ResourceSearch resourceSearch = myMatchUrlService.getResourceSearch(theUrl);
|
||||||
SearchParameterMap paramMap = resourceSearch.getSearchParameterMap();
|
SearchParameterMap paramMap = resourceSearch.getSearchParameterMap();
|
||||||
paramMap.setLoadSynchronous(true);
|
paramMap.setLoadSynchronous(true);
|
||||||
|
|
||||||
Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest, null);
|
Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequestDetails, null);
|
||||||
|
|
||||||
if (resourceIds.size() > 1) {
|
if (resourceIds.size() > 1) {
|
||||||
if (!getStorageSettings().isAllowMultipleDelete()) {
|
if (!getStorageSettings().isAllowMultipleDelete()) {
|
||||||
@ -713,7 +749,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequest);
|
return deletePidList(theUrl, resourceIds, deleteConflicts, theRequestDetails, theTransactionDetails);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -733,15 +769,23 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@Nonnull
|
@Nonnull
|
||||||
@Override
|
@Override
|
||||||
public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList(String theUrl, Collection<P> theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequest) {
|
public <P extends IResourcePersistentId> DeleteMethodOutcome deletePidList(String theUrl, Collection<P> theResourceIds, DeleteConflictList theDeleteConflicts, RequestDetails theRequestDetails, TransactionDetails theTransactionDetails) {
|
||||||
StopWatch w = new StopWatch();
|
StopWatch w = new StopWatch();
|
||||||
TransactionDetails transactionDetails = new TransactionDetails();
|
TransactionDetails transactionDetails = new TransactionDetails();
|
||||||
List<ResourceTable> deletedResources = new ArrayList<>();
|
List<ResourceTable> deletedResources = new ArrayList<>();
|
||||||
|
|
||||||
|
List<IResourcePersistentId<?>> resolvedIds = theResourceIds
|
||||||
|
.stream()
|
||||||
|
.map(t -> (IResourcePersistentId<?>) t)
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
mySystemDao.preFetchResources(resolvedIds, false);
|
||||||
|
|
||||||
for (P pid : theResourceIds) {
|
for (P pid : theResourceIds) {
|
||||||
JpaPid jpaPid = (JpaPid) pid;
|
JpaPid jpaPid = (JpaPid) pid;
|
||||||
|
|
||||||
|
// This shouldn't actually need to hit the DB because we pre-fetch above
|
||||||
ResourceTable entity = myEntityManager.find(ResourceTable.class, jpaPid.getId());
|
ResourceTable entity = myEntityManager.find(ResourceTable.class, jpaPid.getId());
|
||||||
deletedResources.add(entity);
|
deletedResources.add(entity);
|
||||||
|
|
||||||
@ -750,18 +794,18 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
// Notify IServerOperationInterceptors about pre-action call
|
// Notify IServerOperationInterceptors about pre-action call
|
||||||
HookParams hooks = new HookParams()
|
HookParams hooks = new HookParams()
|
||||||
.add(IBaseResource.class, resourceToDelete)
|
.add(IBaseResource.class, resourceToDelete)
|
||||||
.add(RequestDetails.class, theRequest)
|
.add(RequestDetails.class, theRequestDetails)
|
||||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||||
.add(TransactionDetails.class, transactionDetails);
|
.add(TransactionDetails.class, transactionDetails);
|
||||||
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hooks);
|
doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRESTORAGE_RESOURCE_DELETED, hooks);
|
||||||
|
|
||||||
myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequest, transactionDetails);
|
myDeleteConflictService.validateOkToDelete(theDeleteConflicts, entity, false, theRequestDetails, transactionDetails);
|
||||||
|
|
||||||
// Perform delete
|
// Perform delete
|
||||||
|
|
||||||
preDelete(resourceToDelete, entity, theRequest);
|
preDelete(resourceToDelete, entity, theRequestDetails);
|
||||||
|
|
||||||
updateEntityForDelete(theRequest, transactionDetails, entity);
|
updateEntityForDelete(theRequestDetails, transactionDetails, entity);
|
||||||
resourceToDelete.setId(entity.getIdDt());
|
resourceToDelete.setId(entity.getIdDt());
|
||||||
|
|
||||||
// Notify JPA interceptors
|
// Notify JPA interceptors
|
||||||
@ -770,11 +814,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
public void beforeCommit(boolean readOnly) {
|
public void beforeCommit(boolean readOnly) {
|
||||||
HookParams hookParams = new HookParams()
|
HookParams hookParams = new HookParams()
|
||||||
.add(IBaseResource.class, resourceToDelete)
|
.add(IBaseResource.class, resourceToDelete)
|
||||||
.add(RequestDetails.class, theRequest)
|
.add(RequestDetails.class, theRequestDetails)
|
||||||
.addIfMatchesType(ServletRequestDetails.class, theRequest)
|
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
|
||||||
.add(TransactionDetails.class, transactionDetails)
|
.add(TransactionDetails.class, transactionDetails)
|
||||||
.add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
|
.add(InterceptorInvocationTimingEnum.class, transactionDetails.getInvocationTiming(Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED));
|
||||||
doCallHooks(transactionDetails, theRequest, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
doCallHooks(transactionDetails, theRequestDetails, Pointcut.STORAGE_PRECOMMIT_RESOURCE_DELETED, hookParams);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -791,6 +835,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
|
|
||||||
ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis());
|
ourLog.debug("Processed delete on {} (matched {} resource(s)) in {}ms", theUrl, deletedResources.size(), w.getMillis());
|
||||||
|
|
||||||
|
theTransactionDetails.addDeletedResourceIds(theResourceIds);
|
||||||
|
|
||||||
DeleteMethodOutcome retVal = new DeleteMethodOutcome();
|
DeleteMethodOutcome retVal = new DeleteMethodOutcome();
|
||||||
retVal.setDeletedEntities(deletedResources);
|
retVal.setDeletedEntities(deletedResources);
|
||||||
retVal.setOperationOutcome(oo);
|
retVal.setOperationOutcome(oo);
|
||||||
@ -1341,7 +1387,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (theReindexParameters.getReindexSearchParameters() == ReindexParameters.ReindexSearchParametersEnum.ALL) {
|
if (theReindexParameters.getReindexSearchParameters() == ReindexParameters.ReindexSearchParametersEnum.ALL) {
|
||||||
reindexSearchParameters(entity, retVal);
|
reindexSearchParameters(entity, retVal, theTransactionDetails);
|
||||||
}
|
}
|
||||||
if (theReindexParameters.getOptimizeStorage() != ReindexParameters.OptimizeStorageModeEnum.NONE) {
|
if (theReindexParameters.getOptimizeStorage() != ReindexParameters.OptimizeStorageModeEnum.NONE) {
|
||||||
reindexOptimizeStorage(entity, theReindexParameters.getOptimizeStorage());
|
reindexOptimizeStorage(entity, theReindexParameters.getOptimizeStorage());
|
||||||
@ -1351,23 +1397,49 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome) {
|
private void reindexSearchParameters(ResourceTable entity, ReindexOutcome theReindexOutcome, TransactionDetails theTransactionDetails) {
|
||||||
try {
|
try {
|
||||||
T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
|
T resource = (T) myJpaStorageResourceParser.toResource(entity, false);
|
||||||
reindex(resource, entity);
|
reindexSearchParameters(resource, entity, theTransactionDetails);
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
theReindexOutcome.addWarning("Failed to reindex resource " + entity.getIdDt() + ": " + e);
|
theReindexOutcome.addWarning("Failed to reindex resource " + entity.getIdDt() + ": " + e);
|
||||||
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
|
myResourceTableDao.updateIndexStatus(entity.getId(), INDEX_STATUS_INDEXING_FAILED);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @deprecated Use {@link #reindex(IResourcePersistentId, ReindexParameters, RequestDetails, TransactionDetails)}
|
||||||
|
*/
|
||||||
|
@Deprecated
|
||||||
|
@Override
|
||||||
|
public void reindex(T theResource, IBasePersistedResource theEntity) {
|
||||||
|
assert TransactionSynchronizationManager.isActualTransactionActive();
|
||||||
|
ResourceTable entity = (ResourceTable) theEntity;
|
||||||
|
TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
|
||||||
|
|
||||||
|
reindexSearchParameters(theResource, theEntity, transactionDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void reindexSearchParameters(T theResource, IBasePersistedResource theEntity, TransactionDetails transactionDetails) {
|
||||||
|
ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
|
||||||
|
if (theResource != null) {
|
||||||
|
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
|
||||||
|
}
|
||||||
|
|
||||||
|
ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
|
||||||
|
if (theResource != null) {
|
||||||
|
CURRENTLY_REINDEXING.put(theResource, null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
private void reindexOptimizeStorage(ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
|
private void reindexOptimizeStorage(ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
|
||||||
ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
|
ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
|
||||||
if (historyEntity != null) {
|
if (historyEntity != null) {
|
||||||
reindexOptimizeStorageHistoryEntity(entity, historyEntity);
|
reindexOptimizeStorageHistoryEntity(entity, historyEntity);
|
||||||
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
|
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
|
||||||
int pageSize = 100;
|
int pageSize = 100;
|
||||||
for (int page = 0; ((long)page * pageSize) < entity.getVersion(); page++) {
|
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
|
||||||
Slice<ResourceHistoryTable> historyEntities = myResourceHistoryTableDao.findForResourceIdAndReturnEntities(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
|
Slice<ResourceHistoryTable> historyEntities = myResourceHistoryTableDao.findForResourceIdAndReturnEntities(PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
|
||||||
for (ResourceHistoryTable next : historyEntities) {
|
for (ResourceHistoryTable next : historyEntities) {
|
||||||
reindexOptimizeStorageHistoryEntity(entity, next);
|
reindexOptimizeStorageHistoryEntity(entity, next);
|
||||||
@ -1500,23 +1572,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||||||
return entity;
|
return entity;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public void reindex(T theResource, IBasePersistedResource theEntity) {
|
|
||||||
assert TransactionSynchronizationManager.isActualTransactionActive();
|
|
||||||
|
|
||||||
ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
|
|
||||||
if (theResource != null) {
|
|
||||||
CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
|
|
||||||
}
|
|
||||||
|
|
||||||
ResourceTable entity = (ResourceTable) theEntity;
|
|
||||||
TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
|
|
||||||
ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
|
|
||||||
if (theResource != null) {
|
|
||||||
CURRENTLY_REINDEXING.put(theResource, null);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Transactional
|
@Transactional
|
||||||
@Override
|
@Override
|
||||||
public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm) {
|
public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm) {
|
||||||
|
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
 package ca.uhn.fhir.jpa.dao;
 
 import ca.uhn.fhir.i18n.Msg;
@@ -7,14 +26,14 @@ import org.hl7.fhir.instance.model.api.IBaseBooleanDatatype;
 import org.hl7.fhir.instance.model.api.IBaseCoding;
 
 import java.lang.reflect.Field;
-import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * We are trying to preserve null behaviour despite IBaseCoding using primitive boolean for userSelected.
 */
 public class CodingSpy {
-	final Map<Class, Field> mySpies = new HashMap<>();
+	final Map<Class, Field> mySpies = new ConcurrentHashMap<>();
 
 	/**
 	 * Reach into the Coding and pull out the Boolean instead of the boolean.
@@ -85,6 +85,10 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long>, I
 	@Query("UPDATE ResourceTable t SET t.myIndexStatus = :status WHERE t.myId = :id")
 	void updateIndexStatus(@Param("id") Long theId, @Param("status") Long theIndexStatus);
 
+	@Modifying
+	@Query("UPDATE ResourceTable t SET t.myUpdated = :updated WHERE t.myId = :id")
+	void updateLastUpdated(@Param("id") Long theId, @Param("updated") Date theUpdated);
+
 	@Modifying
 	@Query("DELETE FROM ResourceTable t WHERE t.myId = :pid")
 	void deleteByPid(@Param("pid") Long theId);
@@ -39,9 +39,6 @@ import java.util.stream.Collectors;
 
 public class DeleteExpungeSqlBuilder {
 	private static final Logger ourLog = LoggerFactory.getLogger(DeleteExpungeSqlBuilder.class);
-	public static final String PROCESS_NAME = "Delete Expunging";
-	public static final String THREAD_PREFIX = "delete-expunge";
-
 	private final ResourceTableFKProvider myResourceTableFKProvider;
 	private final JpaStorageSettings myStorageSettings;
 	private final IIdHelperService myIdHelper;
@@ -69,7 +69,7 @@ public class DeleteExpungeSvcImpl implements IDeleteExpungeSvc<JpaPid> {
 	 * This method clears the Hibernate Search index for the given resources.
 	 */
 	private void clearHibernateSearchIndex(List<JpaPid> thePersistentIds) {
-		if (myFullTextSearchSvc != null) {
+		if (myFullTextSearchSvc != null && !myFullTextSearchSvc.isDisabled()) {
 			List<Object> objectIds = thePersistentIds.stream().map(JpaPid::getId).collect(Collectors.toList());
 			myFullTextSearchSvc.deleteIndexedDocumentsByTypeAndId(ResourceTable.class, objectIds);
 			ourLog.info("Cleared Hibernate Search indexes.");
@@ -32,6 +32,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
 import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
 import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
@@ -58,8 +59,6 @@ import org.hl7.fhir.utilities.npm.NpmPackage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.support.TransactionTemplate;
 
 import javax.annotation.Nonnull;
 import javax.annotation.PostConstruct;
@@ -90,7 +89,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 	@Autowired
 	private IHapiPackageCacheManager myPackageCacheManager;
 	@Autowired
-	private PlatformTransactionManager myTxManager;
+	private IHapiTransactionService myTxService;
 	@Autowired
 	private INpmPackageVersionDao myPackageVersionDao;
 	@Autowired
@@ -128,6 +127,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		}
 	}
 
+	@Override
 	public PackageDeleteOutcomeJson uninstall(PackageInstallationSpec theInstallationSpec) {
 		return myPackageCacheManager.uninstallPackage(theInstallationSpec.getName(), theInstallationSpec.getVersion());
 	}
@@ -152,7 +152,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		if (enabled) {
 			try {
 
-				boolean exists = new TransactionTemplate(myTxManager).execute(tx -> {
+				boolean exists = myTxService.withSystemRequest().withRequestPartitionId(RequestPartitionId.defaultPartition()).execute(() -> {
 					Optional<NpmPackageVersionEntity> existing = myPackageVersionDao.findByPackageIdAndVersion(theInstallationSpec.getName(), theInstallationSpec.getVersion());
 					return existing.isPresent();
 				});
@@ -267,8 +267,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 				}
 
 			} catch (IOException e) {
-				throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format(
-					"Cannot resolve dependency %s#%s", id, ver), e);
+				throw new ImplementationGuideInstallationException(Msg.code(1287) + String.format("Cannot resolve dependency %s#%s", id, ver), e);
 			}
 		}
 	}
@@ -278,8 +277,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 	 * Asserts if package FHIR version is compatible with current FHIR version
 	 * by using semantic versioning rules.
 	 */
-	protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion)
-		throws ImplementationGuideInstallationException {
+	protected void assertFhirVersionsAreCompatible(String fhirVersion, String currentFhirVersion) throws ImplementationGuideInstallationException {
 
 		FhirVersionEnum fhirVersionEnum = FhirVersionEnum.forVersionString(fhirVersion);
 		FhirVersionEnum currentFhirVersionEnum = FhirVersionEnum.forVersionString(currentFhirVersion);
@@ -290,9 +288,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 			compatible = true;
 		}
 		if (!compatible) {
-			throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format(
-				"Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)",
-				currentFhirVersion, fhirVersion));
+			throw new ImplementationGuideInstallationException(Msg.code(1288) + String.format("Cannot install implementation guide: FHIR versions mismatch (expected <=%s, package uses %s)", currentFhirVersion, fhirVersion));
 		}
 	}
 
@@ -336,26 +332,18 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 					ourLog.info("Skipping update of existing resource matching {}", map.toNormalizedQueryString(myFhirContext));
 				}
 			}
-		}
-		else{
+		} else {
 			ourLog.warn("Failed to upload resource of type {} with ID {} - Error: Resource failed validation", theResource.fhirType(), theResource.getIdElement().getValue());
 		}
 	}
 
 	private IBundleProvider searchResource(IFhirResourceDao theDao, SearchParameterMap theMap) {
-		if (myPartitionSettings.isPartitioningEnabled()) {
-			SystemRequestDetails requestDetails = newSystemRequestDetails();
-			return theDao.search(theMap, requestDetails);
-		} else {
-			return theDao.search(theMap);
-		}
+		return theDao.search(theMap, newSystemRequestDetails());
 	}
 
 	@Nonnull
 	private SystemRequestDetails newSystemRequestDetails() {
-		return
-			new SystemRequestDetails()
-				.setRequestPartitionId(RequestPartitionId.defaultPartition());
+		return new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.defaultPartition());
 	}
 
 	private void createResource(IFhirResourceDao theDao, IBaseResource theResource) {
@@ -400,8 +388,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		}
 
 		if (!isValidResourceStatusForPackageUpload(theResource)) {
-			ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.",
-				theResource.fhirType(), theResource.getIdElement().getValue());
+			ourLog.warn("Failed to validate resource of type {} with ID {} - Error: Resource status not accepted value.", theResource.fhirType(), theResource.getIdElement().getValue());
 			return false;
 		}
 
@@ -458,8 +445,7 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
 		try {
 			return validationSupport.generateSnapshot(new ValidationSupportContext(validationSupport), sd, null, null, null);
 		} catch (Exception e) {
-			throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format(
-				"Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
+			throw new ImplementationGuideInstallationException(Msg.code(1290) + String.format("Failure when generating snapshot of StructureDefinition: %s", sd.getIdElement()), e);
 		}
 	}
 
@@ -26,10 +26,13 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
+import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
 import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
 import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.*;
 import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
@@ -40,6 +43,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
 import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
+import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
 import com.google.common.annotations.VisibleForTesting;
 import org.hl7.fhir.instance.model.api.IBaseParameters;
@@ -59,6 +63,7 @@ import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -132,6 +137,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 	@SuppressWarnings({"unchecked", "rawtypes"})
 	@Nonnull
 	private Parameters reindexInTransaction(RequestDetails theRequestDetails, IIdType theResourceId) {
+		StopWatch sw = new StopWatch();
 		IFhirResourceDao dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
 		ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
 		IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@@ -144,16 +150,26 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 		ResourceIndexedSearchParams existingParamsToPopulate = new ResourceIndexedSearchParams(entity);
 		existingParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());
 
-		dao.reindex(resource, entity);
+		List<String> messages = new ArrayList<>();
+
+		JpaPid pid = JpaPid.fromId(entity.getId());
+		ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails());
+		messages.add("Reindex completed in " + sw);
+
+		for (String next : outcome.getWarnings()) {
+			messages.add("WARNING: " + next);
+		}
 
 		ResourceIndexedSearchParams newParamsToPopulate = new ResourceIndexedSearchParams(entity);
 		newParamsToPopulate.mySearchParamPresentEntities.addAll(entity.getSearchParamPresents());
 
-		return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true);
+		return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, true, messages);
 	}
 
 	@Nonnull
 	private Parameters reindexDryRunInTransaction(RequestDetails theRequestDetails, IIdType theResourceId, RequestPartitionId theRequestPartitionId, TransactionDetails theTransactionDetails, Set<String> theParameters) {
+		StopWatch sw = new StopWatch();
+
 		IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceId.getResourceType());
 		ResourceTable entity = (ResourceTable) dao.readEntity(theResourceId, theRequestDetails);
 		IBaseResource resource = myJpaStorageResourceParser.toResource(entity, false);
@@ -186,7 +202,8 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 			showAction = false;
 		}
 
-		return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction);
+		String message = "Reindex dry-run completed in " + sw + ". No changes were committed to any stored data.";
+		return buildIndexResponse(existingParamsToPopulate, newParamsToPopulate, showAction, List.of(message));
 	}
 
 	@Nonnull
@@ -197,12 +214,16 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
 
 	@Nonnull
 	@VisibleForTesting
-	Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction) {
+	Parameters buildIndexResponse(ResourceIndexedSearchParams theExistingParams, ResourceIndexedSearchParams theNewParams, boolean theShowAction, List<String> theMessages) {
 		Parameters parameters = new Parameters();
 
 		Parameters.ParametersParameterComponent narrativeParameter = parameters.addParameter();
 		narrativeParameter.setName("Narrative");
 
+		for (String next : theMessages) {
+			parameters.addParameter("Message", new StringType(next));
+		}
+
 		// Normal indexes
 		addParamsNonMissing(parameters, "CoordinateIndexes", "Coords", theExistingParams.myCoordsParams, theNewParams.myCoordsParams, new CoordsParamPopulator(), theShowAction);
 		addParamsNonMissing(parameters, "DateIndexes", "Date", theExistingParams.myDateParams, theNewParams.myDateParams, new DateParamPopulator(), theShowAction);
@@ -1,5 +1,15 @@
 <div xmlns:th="http://www.thymeleaf.org">
 
+	<!--/* Messages */-->
+	<div th:if="${resource.hasParameter('Message')}" id="Messages">
+		<h1>Outcome</h1>
+		<ul>
+			<li th:each="part : ${resource.getParameters('Message')}">
+				[[${part.getValue().getValue()}]]
+			</li>
+		</ul>
+	</div>
+
 	<!--/* Number Indexes */-->
 	<div th:if="${resource.hasParameter('NumberIndexes')}" id="NumberIndexes">
 		<h1>Number Indexes</h1>
@@ -1,12 +1,263 @@
 package ca.uhn.fhir.jpa.packages;
 
-import org.elasticsearch.common.inject.Inject;
+import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.context.FhirVersionEnum;
+import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.dao.tx.NonTransactionalHapiTransactionService;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.packages.loader.PackageResourceParsingSvc;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistryController;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.SimpleBundleProvider;
+import org.hl7.fhir.r4.model.CodeSystem;
+import org.hl7.fhir.r4.model.Communication;
+import org.hl7.fhir.r4.model.DocumentReference;
+import org.hl7.fhir.r4.model.Enumerations;
+import org.hl7.fhir.r4.model.SearchParameter;
+import org.hl7.fhir.r4.model.Subscription;
+import org.hl7.fhir.utilities.npm.NpmPackage;
+import org.hl7.fhir.utilities.npm.PackageGenerator;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Spy;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import javax.annotation.Nonnull;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Optional;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
 public class PackageInstallerSvcImplTest {
+
+	public static final String PACKAGE_VERSION = "1.0";
+	public static final String PACKAGE_ID_1 = "package1";
+
+	@Mock
+	private INpmPackageVersionDao myPackageVersionDao;
+	@Mock
+	private IHapiPackageCacheManager myPackageCacheManager;
+	@Mock
+	private ISearchParamRegistryController mySearchParamRegistryController;
+	@Mock
+	private DaoRegistry myDaoRegistry;
+	@Mock
+	private IFhirResourceDao<CodeSystem> myCodeSystemDao;
+	@Spy
+	private FhirContext myCtx = FhirContext.forR4Cached();
+	@Spy
+	private IHapiTransactionService myTxService = new NonTransactionalHapiTransactionService();
+	@Spy
+	private PackageResourceParsingSvc myPackageResourceParsingSvc = new PackageResourceParsingSvc(myCtx);
+	@Spy
+	private PartitionSettings myPartitionSettings = new PartitionSettings();
+	@InjectMocks
+	private PackageInstallerSvcImpl mySvc;
 
 	@Test
 	public void testPackageCompatibility() {
-		new PackageInstallerSvcImpl().assertFhirVersionsAreCompatible("R4", "R4B");
+		mySvc.assertFhirVersionsAreCompatible("R4", "R4B");
 	}
 
+	@Test
+	public void testValidForUpload_SearchParameterWithMetaParam() {
+		SearchParameter sp = new SearchParameter();
+		sp.setCode("_id");
+		assertFalse(mySvc.validForUpload(sp));
+	}
+
+	@Test
+	public void testValidForUpload_SearchParameterWithNoBase() {
+		SearchParameter sp = new SearchParameter();
+		sp.setCode("name");
+		sp.setExpression("Patient.name");
+		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
+		assertFalse(mySvc.validForUpload(sp));
+	}
+
+	@Test
+	public void testValidForUpload_SearchParameterWithNoExpression() {
+		SearchParameter sp = new SearchParameter();
+		sp.setCode("name");
+		sp.addBase("Patient");
+		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
+		assertFalse(mySvc.validForUpload(sp));
+	}
+
+
+	@Test
+	public void testValidForUpload_GoodSearchParameter() {
+		SearchParameter sp = new SearchParameter();
+		sp.setCode("name");
+		sp.addBase("Patient");
+		sp.setExpression("Patient.name");
+		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
+		assertTrue(mySvc.validForUpload(sp));
+	}
+
+	@Test
+	public void testValidForUpload_RequestedSubscription() {
+		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
+			new Subscription.SubscriptionChannelComponent()
+				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
+				.setEndpoint("https://tinyurl.com/2p95e27r");
+		Subscription subscription = new Subscription();
+		subscription.setCriteria("Patient?name=smith");
+		subscription.setChannel(subscriptionChannelComponent);
+		subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED);
+		assertTrue(mySvc.validForUpload(subscription));
+	}
+
+	@Test
+	public void testValidForUpload_ErrorSubscription() {
+		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
+			new Subscription.SubscriptionChannelComponent()
+				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
+				.setEndpoint("https://tinyurl.com/2p95e27r");
+		Subscription subscription = new Subscription();
+		subscription.setCriteria("Patient?name=smith");
+		subscription.setChannel(subscriptionChannelComponent);
+		subscription.setStatus(Subscription.SubscriptionStatus.ERROR);
+		assertFalse(mySvc.validForUpload(subscription));
+	}
+
+	@Test
+	public void testValidForUpload_ActiveSubscription() {
+		Subscription.SubscriptionChannelComponent subscriptionChannelComponent =
+			new Subscription.SubscriptionChannelComponent()
+				.setType(Subscription.SubscriptionChannelType.RESTHOOK)
+				.setEndpoint("https://tinyurl.com/2p95e27r");
+		Subscription subscription = new Subscription();
+		subscription.setCriteria("Patient?name=smith");
+		subscription.setChannel(subscriptionChannelComponent);
+		subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
+		assertFalse(mySvc.validForUpload(subscription));
+	}
+
+	@Test
+	public void testValidForUpload_DocumentRefStatusValuePresent() {
+		DocumentReference documentReference = new DocumentReference();
+		documentReference.setStatus(Enumerations.DocumentReferenceStatus.ENTEREDINERROR);
+		assertTrue(mySvc.validForUpload(documentReference));
+	}
+
+	@Test
+	public void testValidForUpload_DocumentRefStatusValueNull() {
+		DocumentReference documentReference = new DocumentReference();
+		documentReference.setStatus(Enumerations.DocumentReferenceStatus.NULL);
+		assertFalse(mySvc.validForUpload(documentReference));
+		documentReference.setStatus(null);
+		assertFalse(mySvc.validForUpload(documentReference));
+	}
+
+	@Test
+	public void testValidForUpload_CommunicationStatusValuePresent() {
+		Communication communication = new Communication();
+		communication.setStatus(Communication.CommunicationStatus.NOTDONE);
+		assertTrue(mySvc.validForUpload(communication));
+	}
+
+	@Test
+	public void testValidForUpload_CommunicationStatusValueNull() {
+		Communication communication = new Communication();
+		communication.setStatus(Communication.CommunicationStatus.NULL);
+		assertFalse(mySvc.validForUpload(communication));
+		communication.setStatus(null);
+		assertFalse(mySvc.validForUpload(communication));
+	}
+
+	@Test
+	public void testDontTryToInstallDuplicateCodeSystem_CodeSystemAlreadyExistsWithDifferentId() throws IOException {
+		// Setup
+
+		// The CodeSystem that is already saved in the repository
+		CodeSystem existingCs = new CodeSystem();
+		existingCs.setId("CodeSystem/existingcs");
+		existingCs.setUrl("http://my-code-system");
+		existingCs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
+
+		// A new code system in a package we're installing that has the
+		// same URL as the previously saved one, but a different ID.
+		CodeSystem cs = new CodeSystem();
+		cs.setId("CodeSystem/mycs");
+		cs.setUrl("http://my-code-system");
+		cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
+
+		NpmPackage pkg = createPackage(cs, PACKAGE_ID_1);
+
+		when(myPackageVersionDao.findByPackageIdAndVersion(any(), any())).thenReturn(Optional.empty());
+		when(myPackageCacheManager.installPackage(any())).thenReturn(pkg);
+		when(myDaoRegistry.getResourceDao(CodeSystem.class)).thenReturn(myCodeSystemDao);
+		when(myCodeSystemDao.search(any(), any())).thenReturn(new SimpleBundleProvider(existingCs));
+		when(myCodeSystemDao.update(any(),any(RequestDetails.class))).thenReturn(new DaoMethodOutcome());
+
+		PackageInstallationSpec spec = new PackageInstallationSpec();
+		spec.setName(PACKAGE_ID_1);
+		spec.setVersion(PACKAGE_VERSION);
+		spec.setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
+		spec.setPackageContents(packageToBytes(pkg));
+
+		// Test
+		mySvc.install(spec);
+
+		// Verify
+		verify(myCodeSystemDao, times(1)).search(mySearchParameterMapCaptor.capture(), any());
+		SearchParameterMap map = mySearchParameterMapCaptor.getValue();
+		assertEquals("?url=http%3A%2F%2Fmy-code-system", map.toNormalizedQueryString(myCtx));
+
+		verify(myCodeSystemDao, times(1)).update(myCodeSystemCaptor.capture(), any(RequestDetails.class));
+		CodeSystem codeSystem = myCodeSystemCaptor.getValue();
+		assertEquals("existingcs", codeSystem.getIdPart());
+	}
+
+	@Nonnull
+	private static byte[] packageToBytes(NpmPackage pkg) throws IOException {
+		ByteArrayOutputStream stream = new ByteArrayOutputStream();
+		pkg.save(stream);
+		byte[] bytes = stream.toByteArray();
+		return bytes;
+	}
+
+	@Captor
+	private ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
+	@Captor
+	private ArgumentCaptor<CodeSystem> myCodeSystemCaptor;
+
+	@Nonnull
+	private NpmPackage createPackage(CodeSystem cs, String packageId) throws IOException {
+		PackageGenerator manifestGenerator = new PackageGenerator();
+		manifestGenerator.name(packageId);
+		manifestGenerator.version(PACKAGE_VERSION);
+		manifestGenerator.description("a package");
+		manifestGenerator.fhirVersions(List.of(FhirVersionEnum.R4.getFhirVersionString()));
+
+		NpmPackage pkg = NpmPackage.empty(manifestGenerator);
+
+		String csString = myCtx.newJsonParser().encodeResourceToString(cs);
+		pkg.addFile("package", "cs.json", csString.getBytes(StandardCharsets.UTF_8), "CodeSystem");
+
+		return pkg;
+	}
+
+
 }
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.7.0-SNAPSHOT</version>
+		<version>6.7.1-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 
@@ -127,7 +127,7 @@ public class PersistObservationIndexedSearchParamLastNR4IT {
 		ResourceTable entity = new ResourceTable();
 		entity.setId(55L);
 		entity.setResourceType("Observation");
-		entity.setVersion(0L);
+		entity.setVersionForUnitTest(0L);
 
 		testObservationPersist.deleteObservationIndex(entity);
|
||||||
elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
|
elasticsearchSvc.refreshIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX);
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>6.7.0-SNAPSHOT</version>
|
<version>6.7.1-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>6.7.0-SNAPSHOT</version>
|
<version>6.7.1-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
@ -20,6 +20,7 @@ import org.mockito.MockitoAnnotations;
|
|||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
import javax.servlet.http.HttpServletRequest;
|
import javax.servlet.http.HttpServletRequest;
|
||||||
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
import static org.awaitility.Awaitility.await;
|
import static org.awaitility.Awaitility.await;
|
||||||
import static org.mockito.Mockito.when;
|
import static org.mockito.Mockito.when;
|
||||||
@ -108,4 +109,19 @@ public abstract class BaseMdmHelper implements BeforeEachCallback, AfterEachCall
|
|||||||
public PointcutLatch getAfterMdmLatch() {
|
public PointcutLatch getAfterMdmLatch() {
|
||||||
return myAfterMdmLatch;
|
return myAfterMdmLatch;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expect 1 call to the MDM_AFTER_PERSISTED_RESOURCE_CHECKED pointcut when calling theSupplier. Wait until
|
||||||
|
* the mdm message arrives and this pointcut is called before returning the result of theSupplier.
|
||||||
|
* @param theSupplier
|
||||||
|
* @return
|
||||||
|
* @param <T>
|
||||||
|
* @throws InterruptedException
|
||||||
|
*/
|
||||||
|
public <T> T executeWithLatch(Supplier<T> theSupplier) throws InterruptedException {
|
||||||
|
myAfterMdmLatch.setExpectedCount(1);
|
||||||
|
T retval = theSupplier.get();
|
||||||
|
myAfterMdmLatch.awaitExpected();
|
||||||
|
return retval;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,15 +1,20 @@
|
|||||||
package ca.uhn.fhir.jpa.mdm.helper;
|
package ca.uhn.fhir.jpa.mdm.helper;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||||
|
import ca.uhn.fhir.mdm.api.MdmLinkEvent;
|
||||||
import ca.uhn.fhir.rest.server.TransactionLogMessages;
|
import ca.uhn.fhir.rest.server.TransactionLogMessages;
|
||||||
|
import ca.uhn.fhir.rest.server.messaging.ResourceOperationMessage;
|
||||||
|
import ca.uhn.test.concurrency.PointcutLatch;
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
import org.hl7.fhir.r4.model.Patient;
|
import org.hl7.fhir.r4.model.Patient;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
import javax.annotation.Nonnull;
|
import javax.annotation.Nonnull;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD;
|
import static ca.uhn.fhir.mdm.api.MdmConstants.CODE_GOLDEN_RECORD;
|
||||||
import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS;
|
import static ca.uhn.fhir.mdm.api.MdmConstants.SYSTEM_GOLDEN_RECORD_STATUS;
|
||||||
@ -27,8 +32,8 @@ public class MdmHelperR4 extends BaseMdmHelper {
|
|||||||
public OutcomeAndLogMessageWrapper createWithLatch(IBaseResource theBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
|
public OutcomeAndLogMessageWrapper createWithLatch(IBaseResource theBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
|
||||||
myAfterMdmLatch.setExpectedCount(1);
|
myAfterMdmLatch.setExpectedCount(1);
|
||||||
DaoMethodOutcome daoMethodOutcome = doCreateResource(theBaseResource, isExternalHttpRequest);
|
DaoMethodOutcome daoMethodOutcome = doCreateResource(theBaseResource, isExternalHttpRequest);
|
||||||
myAfterMdmLatch.awaitExpected();
|
List<HookParams> hookParams = myAfterMdmLatch.awaitExpected();
|
||||||
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, myAfterMdmLatch.getLatchInvocationParameterOfType(TransactionLogMessages.class));
|
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, hookParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource) throws InterruptedException {
|
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource) throws InterruptedException {
|
||||||
@ -38,8 +43,8 @@ public class MdmHelperR4 extends BaseMdmHelper {
|
|||||||
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
|
public OutcomeAndLogMessageWrapper updateWithLatch(IBaseResource theIBaseResource, boolean isExternalHttpRequest) throws InterruptedException {
|
||||||
myAfterMdmLatch.setExpectedCount(1);
|
myAfterMdmLatch.setExpectedCount(1);
|
||||||
DaoMethodOutcome daoMethodOutcome = doUpdateResource(theIBaseResource, isExternalHttpRequest);
|
DaoMethodOutcome daoMethodOutcome = doUpdateResource(theIBaseResource, isExternalHttpRequest);
|
||||||
myAfterMdmLatch.awaitExpected();
|
List<HookParams> hookParams = myAfterMdmLatch.awaitExpected();
|
||||||
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, myAfterMdmLatch.getLatchInvocationParameterOfType(TransactionLogMessages.class));
|
return new OutcomeAndLogMessageWrapper(daoMethodOutcome, hookParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
public DaoMethodOutcome doCreateResource(IBaseResource theResource, boolean isExternalHttpRequest) {
|
public DaoMethodOutcome doCreateResource(IBaseResource theResource, boolean isExternalHttpRequest) {
|
||||||
@ -68,12 +73,12 @@ public class MdmHelperR4 extends BaseMdmHelper {
|
|||||||
* by the MDM module.
|
* by the MDM module.
|
||||||
*/
|
*/
|
||||||
public class OutcomeAndLogMessageWrapper {
|
public class OutcomeAndLogMessageWrapper {
|
||||||
DaoMethodOutcome myDaoMethodOutcome;
|
private final DaoMethodOutcome myDaoMethodOutcome;
|
||||||
TransactionLogMessages myLogMessages;
|
private final List<HookParams> myHookParams;
|
||||||
|
|
||||||
private OutcomeAndLogMessageWrapper(DaoMethodOutcome theDaoMethodOutcome, TransactionLogMessages theTransactionLogMessages) {
|
public OutcomeAndLogMessageWrapper(DaoMethodOutcome theDaoMethodOutcome, List<HookParams> theHookParams) {
|
||||||
myDaoMethodOutcome = theDaoMethodOutcome;
|
myDaoMethodOutcome = theDaoMethodOutcome;
|
||||||
myLogMessages = theTransactionLogMessages;
|
myHookParams = theHookParams;
|
||||||
}
|
}
|
||||||
|
|
||||||
public DaoMethodOutcome getDaoMethodOutcome() {
|
public DaoMethodOutcome getDaoMethodOutcome() {
|
||||||
@ -81,7 +86,19 @@ public class MdmHelperR4 extends BaseMdmHelper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public TransactionLogMessages getLogMessages() {
|
public TransactionLogMessages getLogMessages() {
|
||||||
return myLogMessages;
|
return PointcutLatch.getInvocationParameterOfType(myHookParams, TransactionLogMessages.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<HookParams> getHookParams() {
|
||||||
|
return myHookParams;
|
||||||
|
}
|
||||||
|
|
||||||
|
public MdmLinkEvent getMdmLinkEvent() {
|
||||||
|
return PointcutLatch.getInvocationParameterOfType(myHookParams, MdmLinkEvent.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
public ResourceOperationMessage getResourceOperationMessage() {
|
||||||
|
return PointcutLatch.getInvocationParameterOfType(myHookParams, ResourceOperationMessage.class);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -58,9 +58,9 @@ public class MdmEventIT extends BaseMdmR4Test {
|
|||||||
addExternalEID(patient2, "eid-11");
|
addExternalEID(patient2, "eid-11");
|
||||||
addExternalEID(patient2, "eid-22");
|
addExternalEID(patient2, "eid-22");
|
||||||
|
|
||||||
myMdmHelper.updateWithLatch(patient2);
|
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.updateWithLatch(patient2);
|
||||||
|
|
||||||
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
|
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
|
||||||
assertNotNull(linkChangeEvent);
|
assertNotNull(linkChangeEvent);
|
||||||
|
|
||||||
ourLog.info("Got event: {}", linkChangeEvent);
|
ourLog.info("Got event: {}", linkChangeEvent);
|
||||||
@ -84,15 +84,15 @@ public class MdmEventIT extends BaseMdmR4Test {
|
|||||||
@Test
|
@Test
|
||||||
public void testCreateLinkChangeEvent() throws InterruptedException {
|
public void testCreateLinkChangeEvent() throws InterruptedException {
|
||||||
Practitioner pr = buildPractitionerWithNameAndId("Young", "AC-DC");
|
Practitioner pr = buildPractitionerWithNameAndId("Young", "AC-DC");
|
||||||
myMdmHelper.createWithLatch(pr);
|
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.createWithLatch(pr);
|
||||||
|
|
||||||
ResourceOperationMessage resourceOperationMessage = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(ResourceOperationMessage.class);
|
ResourceOperationMessage resourceOperationMessage = outcome.getResourceOperationMessage();
|
||||||
assertNotNull(resourceOperationMessage);
|
assertNotNull(resourceOperationMessage);
|
||||||
assertEquals(pr.getIdElement().toUnqualifiedVersionless().getValue(), resourceOperationMessage.getId());
|
assertEquals(pr.getIdElement().toUnqualifiedVersionless().getValue(), resourceOperationMessage.getId());
|
||||||
|
|
||||||
MdmLink link = getLinkByTargetId(pr);
|
MdmLink link = getLinkByTargetId(pr);
|
||||||
|
|
||||||
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
|
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
|
||||||
assertNotNull(linkChangeEvent);
|
assertNotNull(linkChangeEvent);
|
||||||
|
|
||||||
assertEquals(1, linkChangeEvent.getMdmLinks().size());
|
assertEquals(1, linkChangeEvent.getMdmLinks().size());
|
||||||
@ -110,9 +110,9 @@ public class MdmEventIT extends BaseMdmR4Test {
|
|||||||
@Test
|
@Test
|
||||||
public void testUpdateLinkChangeEvent() throws InterruptedException {
|
public void testUpdateLinkChangeEvent() throws InterruptedException {
|
||||||
Patient patient1 = addExternalEID(buildJanePatient(), "eid-1");
|
Patient patient1 = addExternalEID(buildJanePatient(), "eid-1");
|
||||||
myMdmHelper.createWithLatch(patient1);
|
MdmHelperR4.OutcomeAndLogMessageWrapper outcome = myMdmHelper.createWithLatch(patient1);
|
||||||
|
|
||||||
MdmLinkEvent linkChangeEvent = myMdmHelper.getAfterMdmLatch().getLatchInvocationParameterOfType(MdmLinkEvent.class);
|
MdmLinkEvent linkChangeEvent = outcome.getMdmLinkEvent();
|
||||||
assertNotNull(linkChangeEvent);
|
assertNotNull(linkChangeEvent);
|
||||||
assertEquals(1, linkChangeEvent.getMdmLinks().size());
|
assertEquals(1, linkChangeEvent.getMdmLinks().size());
|
||||||
|
|
||||||
|
@ -257,12 +257,11 @@ public class MdmSearchExpandingInterceptorIT extends BaseMdmR4Test {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testReferenceExpansionQuietlyFailsOnMissingMdmMatches() {
|
public void testReferenceExpansionQuietlyFailsOnMissingMdmMatches() throws InterruptedException {
|
||||||
myStorageSettings.setAllowMdmExpansion(true);
|
myStorageSettings.setAllowMdmExpansion(true);
|
||||||
Patient patient = buildJanePatient();
|
Patient patient = buildJanePatient();
|
||||||
patient.getMeta().addTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED, "Don't MDM on me!");
|
patient.getMeta().addTag(MdmConstants.SYSTEM_MDM_MANAGED, MdmConstants.CODE_NO_MDM_MANAGED, "Don't MDM on me!");
|
||||||
DaoMethodOutcome daoMethodOutcome = myMdmHelper.doCreateResource(patient, true);
|
String id = myMdmHelper.executeWithLatch(() -> myMdmHelper.doCreateResource(patient, true)).getId().getIdPart();
|
||||||
String id = daoMethodOutcome.getId().getIdPart();
|
|
||||||
createObservationWithSubject(id);
|
createObservationWithSubject(id);
|
||||||
|
|
||||||
//Even though the user has NO mdm links, that should not cause a request failure.
|
//Even though the user has NO mdm links, that should not cause a request failure.
|
||||||
|
@ -32,7 +32,7 @@ public abstract class BaseProviderR4Test extends BaseMdmR4Test {
|
|||||||
@Autowired
|
@Autowired
|
||||||
private IMdmSubmitSvc myMdmSubmitSvc;
|
private IMdmSubmitSvc myMdmSubmitSvc;
|
||||||
@Autowired
|
@Autowired
|
||||||
private MdmSettings myMdmSettings;
|
protected MdmSettings myMdmSettings;
|
||||||
@Autowired
|
@Autowired
|
||||||
private MdmControllerHelper myMdmHelper;
|
private MdmControllerHelper myMdmHelper;
|
||||||
@Autowired
|
@Autowired
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
package ca.uhn.fhir.jpa.mdm.provider;
|
package ca.uhn.fhir.jpa.mdm.provider;
|
||||||
|
|
||||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
|
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||||
import ca.uhn.fhir.jpa.entity.MdmLink;
|
import ca.uhn.fhir.jpa.entity.MdmLink;
|
||||||
import ca.uhn.fhir.jpa.entity.PartitionEntity;
|
import ca.uhn.fhir.jpa.entity.PartitionEntity;
|
||||||
import ca.uhn.fhir.mdm.api.MdmConstants;
|
import ca.uhn.fhir.mdm.api.MdmConstants;
|
||||||
@ -73,6 +73,7 @@ public class MdmProviderCreateLinkR4Test extends BaseLinkR4Test {
|
|||||||
@Test
|
@Test
|
||||||
public void testCreateLinkWithMatchResultOnDifferentPartitions() {
|
public void testCreateLinkWithMatchResultOnDifferentPartitions() {
|
||||||
myPartitionSettings.setPartitioningEnabled(true);
|
myPartitionSettings.setPartitioningEnabled(true);
|
||||||
|
myMdmSettings.setSearchAllPartitionForMatch(false);
|
||||||
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
|
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
|
||||||
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);
|
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);
|
||||||
assertLinkCount(1);
|
assertLinkCount(1);
|
||||||
|
@ -148,6 +148,7 @@ public class MdmProviderMergeGoldenResourcesR4Test extends BaseProviderR4Test {
|
|||||||
@Test
|
@Test
|
||||||
public void testMergeOnDifferentPartitions() {
|
public void testMergeOnDifferentPartitions() {
|
||||||
myPartitionSettings.setPartitioningEnabled(true);
|
myPartitionSettings.setPartitioningEnabled(true);
|
||||||
|
myMdmSettings.setSearchAllPartitionForMatch(false);
|
||||||
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
|
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(1).setName(PARTITION_1), null);
|
||||||
RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1);
|
RequestPartitionId requestPartitionId1 = RequestPartitionId.fromPartitionId(1);
|
||||||
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);
|
myPartitionLookupSvc.createPartition(new PartitionEntity().setId(2).setName(PARTITION_2), null);
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>6.7.0-SNAPSHOT</version>
|
<version>6.7.1-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
@@ -153,10 +153,6 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
 myUpdated = theUpdated;
 }
 
-public void setUpdated(InstantDt theUpdated) {
-myUpdated = theUpdated.getValue();
-}
-
 @Override
 public abstract long getVersion();
 
@@ -27,6 +27,7 @@ import ca.uhn.fhir.jpa.model.search.ResourceTableRoutingBinder;
 import ca.uhn.fhir.jpa.model.search.SearchParamTextPropertyBinder;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.Constants;
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
 import org.hibernate.Session;
@@ -59,6 +60,7 @@ import javax.persistence.Index;
 import javax.persistence.NamedEntityGraph;
 import javax.persistence.OneToMany;
 import javax.persistence.OneToOne;
+import javax.persistence.PostPersist;
 import javax.persistence.PrePersist;
 import javax.persistence.PreUpdate;
 import javax.persistence.Table;
@@ -67,12 +69,13 @@ import javax.persistence.Version;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Date;
 import java.util.HashSet;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
+@Indexed(routingBinder = @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
 @Entity
 @Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = {
 // Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE
@@ -83,23 +86,22 @@ import java.util.stream.Collectors;
 @NamedEntityGraph(name = "Resource.noJoins")
 public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource<JpaPid> {
 public static final int RESTYPE_LEN = 40;
-private static final int MAX_LANGUAGE_LENGTH = 20;
-private static final long serialVersionUID = 1L;
 public static final String HFJ_RESOURCE = "HFJ_RESOURCE";
 public static final String RES_TYPE = "RES_TYPE";
+private static final int MAX_LANGUAGE_LENGTH = 20;
+private static final long serialVersionUID = 1L;
 /**
 * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
 * Note the extra config needed in HS6 for indexing transient props:
 * https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#indexed-transient-requires-configuration
-*
+* <p>
 * Note that we depend on `myVersion` updated for this field to be indexed.
 */
 @Transient
 @FullTextField(name = "myContentText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
-@FullTextField(name = "myContentTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
-@FullTextField(name = "myContentTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
-@FullTextField(name = "myContentTextPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
+@FullTextField(name = "myContentTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
+@FullTextField(name = "myContentTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
+@FullTextField(name = "myContentTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
 @OptimisticLock(excluded = true)
 @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion")))
 private String myContentText;
@@ -133,9 +135,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 */
 @Transient()
 @FullTextField(name = "myNarrativeText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
-@FullTextField(name = "myNarrativeTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
-@FullTextField(name = "myNarrativeTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
-@FullTextField(name = "myNarrativeTextPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
+@FullTextField(name = "myNarrativeTextEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
+@FullTextField(name = "myNarrativeTextNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
+@FullTextField(name = "myNarrativeTextPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
 @OptimisticLock(excluded = true)
 @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion")))
 private String myNarrativeText;
@@ -278,18 +280,17 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 @Transient
 private transient boolean myUnchangedInCurrentOperation;
 
 
 /**
 * The id of the Resource.
 * Will contain either the client-assigned id, or the sequence value.
 * Will be null during insert time until the first read.
-*
 */
-@Column(name= "FHIR_ID",
+@Column(name = "FHIR_ID",
 // [A-Za-z0-9\-\.]{1,64} - https://www.hl7.org/fhir/datatypes.html#id
 length = 64,
 // we never update this after insert, and the Generator will otherwise "dirty" the object.
 updatable = false)
 
 // inject the pk for server-assigned sequence ids.
 @GeneratorType(when = GenerationTime.INSERT, type = FhirIdGenerator.class)
 // Make sure the generator doesn't bump the history version.
@@ -305,30 +306,21 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 @Column(name = "SEARCH_URL_PRESENT", nullable = true)
 private Boolean mySearchUrlPresent = false;
 
-/**
-* Populate myFhirId with server-assigned sequence id when no client-id provided.
-* We eat this complexity during insert to simplify query time with a uniform column.
-* Server-assigned sequence ids aren't available until just before insertion.
-* Hibernate calls insert Generators after the pk has been assigned, so we can use myId safely here.
-*/
-public static final class FhirIdGenerator implements ValueGenerator<String> {
-@Override
-public String generateValue(Session session, Object owner) {
-ResourceTable that = (ResourceTable) owner;
-return that.myFhirId != null ? that.myFhirId : that.myId.toString();
-}
-}
-
 @Version
 @Column(name = "RES_VER")
 private long myVersion;
 
 @OneToMany(mappedBy = "myResourceTable", fetch = FetchType.LAZY)
 private Collection<ResourceHistoryProvenanceEntity> myProvenance;
 
 @Transient
 private transient ResourceHistoryTable myCurrentVersionEntity;
 
+@Transient
+private transient ResourceHistoryTable myNewVersionEntity;
+
+@Transient
+private transient boolean myVersionUpdatedInCurrentTransaction;
+
 @OneToOne(optional = true, fetch = FetchType.EAGER, cascade = {}, orphanRemoval = false, mappedBy = "myResource")
 @OptimisticLock(excluded = true)
 private ForcedId myForcedId;
@@ -343,6 +335,39 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 super();
 }
 
+/**
+* Setting this flag is an indication that we're making changes and the version number will
+* be incremented in the current transaction. When this is set, calls to {@link #getVersion()}
+* will be incremented by one.
+* This flag is cleared in {@link #postPersist()} since at that time the new version number
+* should be reflected.
+*/
+public void markVersionUpdatedInCurrentTransaction() {
+if (!myVersionUpdatedInCurrentTransaction) {
+/*
+* Note that modifying this number doesn't actually directly affect what
+* gets stored in the database since this is a @Version field and the
+* value is therefore managed by Hibernate. So in other words, if the
+* row in the database is updated, it doesn't matter what we set
+* this field to, hibernate will increment it by one. However, we still
+* increment it for two reasons:
+* 1. The value gets used for the version attribute in the ResourceHistoryTable
+* entity we create for each new version.
+* 2. For updates to existing resources, there may actually not be any other
+* changes to this entity so incrementing this is a signal to
+* Hibernate that something changed and we need to force an entity
+* update.
+*/
+myVersion++;
+this.myVersionUpdatedInCurrentTransaction = true;
+}
+}
+
+@PostPersist
+public void postPersist() {
+myVersionUpdatedInCurrentTransaction = false;
+}
+
 @Override
 public ResourceTag addTag(TagDefinition theTag) {
 for (ResourceTag next : getTags()) {
@@ -355,7 +380,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 return tag;
 }
 
-
 public String getHashSha256() {
 return myHashSha256;
 }
@@ -558,6 +582,26 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 return myVersion;
 }
 
+/**
+* Sets the version on this entity to {@literal 1}. This should only be called
+* on resources that are not yet persisted. After that time the version number
+* is managed by hibernate.
+*/
+public void initializeVersion() {
+assert myId == null;
+myVersion = 1;
+}
+
+/**
+* Don't call this in any JPA environments, the version will be ignored
+* since this field is managed by hibernate
+*/
+@VisibleForTesting
+public void setVersionForUnitTest(long theVersion) {
+myVersion = theVersion;
+}
+
+
 @Override
 public boolean isDeleted() {
 return getDeleted() != null;
@@ -568,10 +612,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 setDeleted(null);
 }
 
-public void setVersion(long theVersion) {
-myVersion = theVersion;
-}
-
 public boolean isHasLinks() {
 return myHasLinks;
 }
@@ -580,6 +620,23 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 myHasLinks = theHasLinks;
 }
 
+/**
+* Clears all the index population flags, e.g. {@link #isParamsStringPopulated()}
+*
+* @since 6.8.0
+*/
+public void clearAllParamsPopulated() {
+myParamsTokenPopulated = false;
+myParamsCoordsPopulated = false;
+myParamsDatePopulated = false;
+myParamsNumberPopulated = false;
+myParamsStringPopulated = false;
+myParamsQuantityPopulated = false;
+myParamsQuantityNormalizedPopulated = false;
+myParamsUriPopulated = false;
+myHasLinks = false;
+}
+
 public boolean isParamsComboStringUniquePresent() {
 if (myParamsComboStringUniquePresent == null) {
 return false;
@@ -689,14 +746,14 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 myUnchangedInCurrentOperation = theUnchangedInCurrentOperation;
 }
 
-public void setContentText(String theContentText) {
-myContentText = theContentText;
-}
-
 public String getContentText() {
 return myContentText;
 }
 
+public void setContentText(String theContentText) {
+myContentText = theContentText;
+}
+
 public void setNarrativeText(String theNarrativeText) {
 myNarrativeText = theNarrativeText;
 }
@@ -709,12 +766,27 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 mySearchUrlPresent = theSearchUrlPresent;
 }
 
+/**
+* This method creates a new history entity, or might reuse the current one if we've
+* already created one in the current transaction. This is because we can only increment
+* the version once in a DB transaction (since hibernate manages that number) so creating
+* multiple {@link ResourceHistoryTable} entities will result in a constraint error.
+*/
 public ResourceHistoryTable toHistory(boolean theCreateVersionTags) {
-ResourceHistoryTable retVal = new ResourceHistoryTable();
+boolean createVersionTags = theCreateVersionTags;
+
+ResourceHistoryTable retVal = myNewVersionEntity;
+if (retVal == null) {
+retVal = new ResourceHistoryTable();
+myNewVersionEntity = retVal;
+} else {
+// Tags should already be set
+createVersionTags = false;
+}
+
 retVal.setResourceId(myId);
 retVal.setResourceType(myResourceType);
-retVal.setVersion(myVersion);
+retVal.setVersion(getVersion());
 retVal.setTransientForcedId(getTransientForcedId());
 
 retVal.setPublished(getPublishedDate());
@@ -725,10 +797,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 retVal.setForcedId(getForcedId());
 retVal.setPartitionId(getPartitionId());
 
-retVal.getTags().clear();
-
 retVal.setHasTags(isHasTags());
-if (isHasTags() && theCreateVersionTags) {
+if (isHasTags() && createVersionTags) {
 for (ResourceTag next : getTags()) {
 retVal.addTag(next);
 }
@@ -772,16 +842,16 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 * This is a convenience to avoid loading the version a second time within a single transaction. It is
 * not persisted.
 */
-public void setCurrentVersionEntity(ResourceHistoryTable theCurrentVersionEntity) {
-myCurrentVersionEntity = theCurrentVersionEntity;
+public ResourceHistoryTable getCurrentVersionEntity() {
+return myCurrentVersionEntity;
 }
 
 /**
 * This is a convenience to avoid loading the version a second time within a single transaction. It is
 * not persisted.
 */
-public ResourceHistoryTable getCurrentVersionEntity() {
-return myCurrentVersionEntity;
+public void setCurrentVersionEntity(ResourceHistoryTable theCurrentVersionEntity) {
+myCurrentVersionEntity = theCurrentVersionEntity;
 }
 
 @Override
@@ -799,8 +869,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 myForcedId = theForcedId;
 }
 
-
-
 @Override
 public IdDt getIdDt() {
 IdDt retVal = new IdDt();
@@ -808,7 +876,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 return retVal;
 }
 
-
 public IIdType getIdType(FhirContext theContext) {
 IIdType retVal = theContext.getVersion().newIdType();
 populateId(retVal);
@@ -830,14 +897,14 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 }
 }
 
-public void setCreatedByMatchUrl(String theCreatedByMatchUrl) {
-myCreatedByMatchUrl = theCreatedByMatchUrl;
-}
-
 public String getCreatedByMatchUrl() {
 return myCreatedByMatchUrl;
 }
 
+public void setCreatedByMatchUrl(String theCreatedByMatchUrl) {
+myCreatedByMatchUrl = theCreatedByMatchUrl;
+}
+
 public void setLuceneIndexData(ExtendedHSearchIndexData theLuceneIndexData) {
 myLuceneIndexData = theLuceneIndexData;
 }
@@ -862,4 +929,18 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
 public void setFhirId(String theFhirId) {
 myFhirId = theFhirId;
 }
+
+/**
+* Populate myFhirId with server-assigned sequence id when no client-id provided.
+* We eat this complexity during insert to simplify query time with a uniform column.
+* Server-assigned sequence ids aren't available until just before insertion.
+* Hibernate calls insert Generators after the pk has been assigned, so we can use myId safely here.
+*/
+public static final class FhirIdGenerator implements ValueGenerator<String> {
+@Override
+public String generateValue(Session session, Object owner) {
+ResourceTable that = (ResourceTable) owner;
+return that.myFhirId != null ? that.myFhirId : that.myId.toString();
+}
+}
 }
@ -5,7 +5,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>6.7.0-SNAPSHOT</version>
|
<version>6.7.1-SNAPSHOT</version>
|
||||||
|
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
@ -224,6 +224,9 @@ public class SearchParamExtractorService {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public IBaseResource fetchResourceAtPath(@Nonnull PathAndRef thePathAndRef) {
|
public IBaseResource fetchResourceAtPath(@Nonnull PathAndRef thePathAndRef) {
|
||||||
|
if (thePathAndRef.getRef() == null) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
return findContainedResource(containedResources, thePathAndRef.getRef());
|
return findContainedResource(containedResources, thePathAndRef.getRef());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -94,7 +94,7 @@ public class SearchParamRegistryImplTest {
|
|||||||
ResourceTable searchParamEntity = new ResourceTable();
|
ResourceTable searchParamEntity = new ResourceTable();
|
||||||
searchParamEntity.setResourceType("SearchParameter");
|
searchParamEntity.setResourceType("SearchParameter");
|
||||||
searchParamEntity.setId(theId);
|
searchParamEntity.setId(theId);
|
||||||
searchParamEntity.setVersion(theVersion);
|
searchParamEntity.setVersionForUnitTest(theVersion);
|
||||||
return searchParamEntity;
|
return searchParamEntity;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -199,7 +199,7 @@ public class SearchParamRegistryImplTest {
|
|||||||
|
|
||||||
// Update the resource without changing anything that would affect our cache
|
// Update the resource without changing anything that would affect our cache
|
||||||
ResourceTable lastEntity = newEntities.get(newEntities.size() - 1);
|
ResourceTable lastEntity = newEntities.get(newEntities.size() - 1);
|
||||||
lastEntity.setVersion(2);
|
lastEntity.setVersionForUnitTest(2);
|
||||||
resetMock(Enumerations.PublicationStatus.ACTIVE, newEntities);
|
resetMock(Enumerations.PublicationStatus.ACTIVE, newEntities);
|
||||||
mySearchParamRegistry.requestRefresh();
|
mySearchParamRegistry.requestRefresh();
|
||||||
assertResult(mySearchParamRegistry.refreshCacheIfNecessary(), 0, 1, 0);
|
assertResult(mySearchParamRegistry.refreshCacheIfNecessary(), 0, 1, 0);
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
<parent>
|
<parent>
|
||||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||||
<artifactId>hapi-deployable-pom</artifactId>
|
<artifactId>hapi-deployable-pom</artifactId>
|
||||||
<version>6.7.0-SNAPSHOT</version>
|
<version>6.7.1-SNAPSHOT</version>
|
||||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||||
</parent>
|
</parent>
|
||||||
|
|
||||||
|
@ -20,6 +20,7 @@
|
|||||||
package ca.uhn.fhir.jpa.subscription.channel.subscription;
|
package ca.uhn.fhir.jpa.subscription.channel.subscription;
|
||||||
|
|
||||||
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
||||||
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
@ -60,6 +61,7 @@ class SubscriptionChannelCache {
|
|||||||
return myCache.containsKey(theChannelName);
|
return myCache.containsKey(theChannelName);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
void logForUnitTest() {
|
void logForUnitTest() {
|
||||||
for (String key : myCache.keySet()) {
|
for (String key : myCache.keySet()) {
|
||||||
ourLog.info("SubscriptionChannelCache: {}", key);
|
ourLog.info("SubscriptionChannelCache: {}", key);
|
||||||
|
@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.subscription.channel.models.ReceivingChannelParameters;
|
|||||||
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
|
import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
|
||||||
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
|
||||||
import ca.uhn.fhir.jpa.subscription.model.ChannelRetryConfiguration;
|
import ca.uhn.fhir.jpa.subscription.model.ChannelRetryConfiguration;
|
||||||
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import com.google.common.collect.Multimap;
|
import com.google.common.collect.Multimap;
|
||||||
import com.google.common.collect.MultimapBuilder;
|
import com.google.common.collect.MultimapBuilder;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
@ -147,4 +148,9 @@ public class SubscriptionChannelRegistry {
|
|||||||
public synchronized int size() {
|
public synchronized int size() {
|
||||||
return myDeliveryReceiverChannels.size();
|
return myDeliveryReceiverChannels.size();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@VisibleForTesting
|
||||||
|
public void logForUnitTest() {
|
||||||
|
myDeliveryReceiverChannels.logForUnitTest();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
|
|||||||
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
|
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
|
||||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||||
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
|
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
import org.hl7.fhir.instance.model.api.IIdType;
|
import org.hl7.fhir.instance.model.api.IIdType;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
@ -93,17 +92,15 @@ public class SubscriptionRegisteringSubscriber implements MessageHandler {
|
|||||||
// - in order to store partition id in the userdata of the resource for partitioned subscriptions
|
// - in order to store partition id in the userdata of the resource for partitioned subscriptions
|
||||||
// - in case we're processing out of order and a create-then-delete has been processed backwards (or vice versa)
|
// - in case we're processing out of order and a create-then-delete has been processed backwards (or vice versa)
|
||||||
|
|
||||||
IBaseResource payloadResource;
|
|
||||||
IIdType payloadId = payload.getPayloadId(myFhirContext).toUnqualifiedVersionless();
|
IIdType payloadId = payload.getPayloadId(myFhirContext).toUnqualifiedVersionless();
|
||||||
try {
|
|
||||||
IFhirResourceDao<?> subscriptionDao = myDaoRegistry.getResourceDao("Subscription");
|
IFhirResourceDao<?> subscriptionDao = myDaoRegistry.getResourceDao("Subscription");
|
||||||
RequestDetails systemRequestDetails = getPartitionAwareRequestDetails(payload);
|
RequestDetails systemRequestDetails = getPartitionAwareRequestDetails(payload);
|
||||||
payloadResource = subscriptionDao.read(payloadId, systemRequestDetails);
|
IBaseResource payloadResource = subscriptionDao.read(payloadId, systemRequestDetails, true);
|
||||||
if (payloadResource == null) {
|
if (payloadResource == null) {
|
||||||
// Only for unit test
|
// Only for unit test
|
||||||
payloadResource = payload.getPayload(myFhirContext);
|
payloadResource = payload.getPayload(myFhirContext);
|
||||||
}
|
}
|
||||||
} catch (ResourceGoneException e) {
|
if (payloadResource.isDeleted()) {
|
||||||
mySubscriptionRegistry.unregisterSubscriptionIfRegistered(payloadId.getIdPart());
|
mySubscriptionRegistry.unregisterSubscriptionIfRegistered(payloadId.getIdPart());
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.topic;
|
|||||||
|
|
||||||
import ca.uhn.fhir.context.FhirContext;
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
import ca.uhn.fhir.i18n.Msg;
|
import ca.uhn.fhir.i18n.Msg;
|
||||||
|
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
|
||||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
import org.hl7.fhir.r5.model.SubscriptionTopic;
|
import org.hl7.fhir.r5.model.SubscriptionTopic;
|
||||||
|
|
||||||
@ -31,11 +32,10 @@ public final class SubscriptionTopicCanonicalizer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// WIP STR5 use elsewhere
|
// WIP STR5 use elsewhere
|
||||||
public static SubscriptionTopic canonicalize(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) {
|
public static SubscriptionTopic canonicalizeTopic(FhirContext theFhirContext, IBaseResource theSubscriptionTopic) {
|
||||||
switch (theFhirContext.getVersion().getVersion()) {
|
switch (theFhirContext.getVersion().getVersion()) {
|
||||||
case R4B:
|
case R4B:
|
||||||
String encoded = theFhirContext.newJsonParser().encodeResourceToString(theSubscriptionTopic);
|
return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionTopic) theSubscriptionTopic);
|
||||||
return ourFhirContextR5.newJsonParser().parseResource(SubscriptionTopic.class, encoded);
|
|
||||||
case R5:
|
case R5:
|
||||||
return (SubscriptionTopic) theSubscriptionTopic;
|
return (SubscriptionTopic) theSubscriptionTopic;
|
||||||
default:
|
default:
|
||||||
|
@ -26,11 +26,11 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionConstants;
 import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.util.Logs;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r5.model.Enumerations;
 import org.hl7.fhir.r5.model.SubscriptionTopic;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import javax.annotation.Nonnull;
@ -40,7 +40,7 @@ import java.util.Set;
 
 
 public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer {
-private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicLoader.class);
+private static final Logger ourLog = Logs.getSubscriptionTopicLog();
 
 @Autowired
 private FhirContext myFhirContext;
@ -107,10 +107,7 @@ public class SubscriptionTopicLoader extends BaseResourceCacheSynchronizer {
 if (theResource instanceof SubscriptionTopic) {
 return (SubscriptionTopic) theResource;
 } else if (theResource instanceof org.hl7.fhir.r4b.model.SubscriptionTopic) {
-return myFhirContext.newJsonParser().parseResource(SubscriptionTopic.class, FhirContext.forR4BCached().newJsonParser().encodeResourceToString(theResource));
-// WIP STR5 VersionConvertorFactory_43_50 when it supports SubscriptionTopic
-// track here: https://github.com/hapifhir/org.hl7.fhir.core/issues/1212
-// return (SubscriptionTopic) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionTopic) theResource);
+return SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theResource);
 } else {
 throw new IllegalArgumentException(Msg.code(2332) + "Only R4B and R5 SubscriptionTopic is currently supported. Found " + theResource.getClass());
 }
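The logger change applied here and repeated in the subscription-topic classes below swaps per-class slf4j loggers for the shared subscription-topic logger exposed by ca.uhn.fhir.util.Logs, so the whole feature logs under one name. A minimal sketch of that pattern (the class and message below are illustrative, not from the commit):

    import ca.uhn.fhir.util.Logs;
    import org.slf4j.Logger;

    class TopicLoggingSketch {
        // one named logger shared by all subscription-topic components, so their output
        // can be tuned together in the logging configuration
        private static final Logger ourLog = Logs.getSubscriptionTopicLog();

        void report(String theTopicId) {
            ourLog.info("Processing subscription topic {}", theTopicId);
        }
    }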
@ -29,11 +29,11 @@ import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
+import ca.uhn.fhir.util.Logs;
 import org.hl7.fhir.instance.model.api.IBaseBundle;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r5.model.SubscriptionTopic;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.messaging.Message;
 import org.springframework.messaging.MessageHandler;
@ -45,7 +45,7 @@ import java.util.List;
 import java.util.UUID;
 
 public class SubscriptionTopicMatchingSubscriber implements MessageHandler {
-private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicMatchingSubscriber.class);
+private static final Logger ourLog = Logs.getSubscriptionTopicLog();
 
 private final FhirContext myFhirContext;
 @Autowired
@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
 import ca.uhn.fhir.util.BundleBuilder;
+import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
 import org.hl7.fhir.instance.model.api.IBaseBundle;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r5.model.Bundle;
@ -55,11 +56,7 @@ public class SubscriptionTopicPayloadBuilder {
 
 if (fhirVersion == FhirVersionEnum.R4B) {
 bundleBuilder.setType(Bundle.BundleType.HISTORY.toCode());
-String serializedSubscriptionStatus = FhirContext.forR5Cached().newJsonParser().encodeResourceToString(subscriptionStatus);
-subscriptionStatus = myFhirContext.newJsonParser().parseResource(org.hl7.fhir.r4b.model.SubscriptionStatus.class, serializedSubscriptionStatus);
-// WIP STR5 VersionConvertorFactory_43_50 when it supports SubscriptionStatus
-// track here: https://github.com/hapifhir/org.hl7.fhir.core/issues/1212
-// subscriptionStatus = (SubscriptionStatus) VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r4b.model.SubscriptionStatus) subscriptionStatus);
+subscriptionStatus = VersionConvertorFactory_43_50.convertResource((org.hl7.fhir.r5.model.SubscriptionStatus) subscriptionStatus);
 } else if (fhirVersion == FhirVersionEnum.R5) {
 bundleBuilder.setType(Bundle.BundleType.SUBSCRIPTIONNOTIFICATION.toCode());
 } else {
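As with SubscriptionTopic above, the R4B notification path now down-converts the R5 SubscriptionStatus with the structure converter rather than a JSON round trip. A hedged sketch of that step (the wrapper class and method name are illustrative; the converter call is the one added above, and the result is held as an IBaseResource as an assumption about the surrounding variable's type):

    import org.hl7.fhir.convertors.factory.VersionConvertorFactory_43_50;
    import org.hl7.fhir.instance.model.api.IBaseResource;

    class StatusDownConvertSketch {
        // converts an R5 SubscriptionStatus into its R4B form for inclusion in an R4B bundle
        IBaseResource toR4b(org.hl7.fhir.r5.model.SubscriptionStatus theStatus) {
            return VersionConvertorFactory_43_50.convertResource(theStatus);
        }
    }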
@ -28,12 +28,12 @@ import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.util.Logs;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r5.model.Enumerations;
 import org.hl7.fhir.r5.model.SubscriptionTopic;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.messaging.Message;
 import org.springframework.messaging.MessageHandler;
@ -48,7 +48,8 @@ import javax.annotation.Nonnull;
 * Also validates criteria. If invalid, rejects the subscription without persisting the subscription.
 */
 public class SubscriptionTopicRegisteringSubscriber implements MessageHandler {
-private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicRegisteringSubscriber.class);
+private static final Logger ourLog = Logs.getSubscriptionTopicLog();
+
 @Autowired
 private FhirContext myFhirContext;
 @Autowired
@ -106,7 +107,7 @@ public class SubscriptionTopicRegisteringSubscriber implements MessageHandler {
 return;
 }
 
-SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalize(myFhirContext, payloadResource);
+SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, payloadResource);
 if (subscriptionTopic.getStatus() == Enumerations.PublicationStatus.ACTIVE) {
 mySubscriptionTopicRegistry.register(subscriptionTopic);
 } else {
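The registration path above only registers topics whose publication status is ACTIVE. A small sketch of that gate using the types visible in this diff (the canonicalizer's package is taken from the new test's package declaration; the surrounding class and the println stand-in for the registry call are illustrative):

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.jpa.topic.SubscriptionTopicCanonicalizer;
    import org.hl7.fhir.instance.model.api.IBaseResource;
    import org.hl7.fhir.r5.model.Enumerations;
    import org.hl7.fhir.r5.model.SubscriptionTopic;

    class TopicRegistrationSketch {
        void maybeRegister(FhirContext theFhirContext, IBaseResource thePayload) {
            SubscriptionTopic topic = SubscriptionTopicCanonicalizer.canonicalizeTopic(theFhirContext, thePayload);
            if (topic.getStatus() == Enumerations.PublicationStatus.ACTIVE) {
                // the real subscriber calls mySubscriptionTopicRegistry.register(topic) here
                System.out.println("Registering subscription topic " + topic.getId());
            }
        }
    }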
@ -30,14 +30,15 @@ import ca.uhn.fhir.parser.DataFormatException;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.util.Logs;
 import com.google.common.annotations.VisibleForTesting;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.r5.model.SubscriptionTopic;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class SubscriptionTopicValidatingInterceptor {
-private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTopicValidatingInterceptor.class);
+private static final Logger ourLog = Logs.getSubscriptionTopicLog();
+
 private final FhirContext myFhirContext;
 private final SubscriptionQueryValidator mySubscriptionQueryValidator;
 
@ -69,7 +70,7 @@ public class SubscriptionTopicValidatingInterceptor {
 return;
 }
 
-SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalize(myFhirContext, theSubscription);
+SubscriptionTopic subscriptionTopic = SubscriptionTopicCanonicalizer.canonicalizeTopic(myFhirContext, theSubscription);
 
 boolean finished = false;
 if (subscriptionTopic.getStatus() == null) {
@ -24,17 +24,18 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
+import ca.uhn.fhir.util.Logs;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r5.model.Enumeration;
 import org.hl7.fhir.r5.model.SubscriptionTopic;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.util.List;
 
 public class SubscriptionTriggerMatcher {
-private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionTriggerMatcher.class);
+private static final Logger ourLog = Logs.getSubscriptionTopicLog();
+
 private final SubscriptionTopicSupport mySubscriptionTopicSupport;
 private final BaseResourceMessage.OperationTypeEnum myOperation;
 private final SubscriptionTopic.SubscriptionTopicResourceTriggerComponent myTrigger;
@ -4,17 +4,19 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
 import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
+import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
-import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.server.messaging.BaseResourceMessage;
 import ca.uhn.fhir.rest.server.messaging.json.ResourceOperationJsonMessage;
+import org.hl7.fhir.r4.model.InstantType;
 import org.hl7.fhir.r4.model.Subscription;
 import org.hl7.fhir.r4.model.codesystems.SubscriptionStatus;
+import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@ -34,6 +36,7 @@ import java.util.List;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@ -46,8 +49,8 @@ public class SubscriptionRegisteringSubscriberTest {
 private FhirContext myFhirContext = FhirContext.forR4Cached();
 @Mock
 private SubscriptionRegistry mySubscriptionRegistry;
-@Mock
-private SubscriptionCanonicalizer mySubscriptionCanonicalizer;
+@Spy
+private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext);
 @Mock
 private DaoRegistry myDaoRegistry;
 @Mock
@ -61,8 +64,15 @@ public class SubscriptionRegisteringSubscriberTest {
 
 @BeforeEach
 public void beforeEach() {
-mySubscription = new Subscription();
-mySubscription.setId("Subscription/testrest");
+mySubscription = buildSubscription();
+}
+
+@NotNull
+private static Subscription buildSubscription() {
+Subscription subscription = new Subscription();
+subscription.setId("Subscription/testrest");
+subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
+return subscription;
 }
 
 @Test
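The @Mock to @Spy switch above means the test now wraps a real SubscriptionCanonicalizer built from the R4 FhirContext: un-stubbed methods run the real canonicalization code, while individual calls can still be stubbed per test. A minimal sketch of that setup, assuming the standard Mockito JUnit 5 extension (the test class below is illustrative, not the one in this commit):

    import ca.uhn.fhir.context.FhirContext;
    import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionCanonicalizer;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Spy;
    import org.mockito.junit.jupiter.MockitoExtension;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.doReturn;

    @ExtendWith(MockitoExtension.class)
    class CanonicalizerSpySketchTest {
        private final FhirContext myFhirContext = FhirContext.forR4Cached();

        // a spy over a real instance, exactly as declared in the diff above
        @Spy
        private SubscriptionCanonicalizer mySubscriptionCanonicalizer = new SubscriptionCanonicalizer(myFhirContext);

        @Test
        void spyCanStillBeStubbed() {
            org.hl7.fhir.r4.model.Subscription subscription = new org.hl7.fhir.r4.model.Subscription();
            // doReturn/when is the spy-safe stubbing form; anything not stubbed runs the real code
            doReturn("active").when(mySubscriptionCanonicalizer).getSubscriptionStatus(subscription);
            assertEquals("active", mySubscriptionCanonicalizer.getSubscriptionStatus(subscription));
        }
    }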
@ -79,7 +89,9 @@ public class SubscriptionRegisteringSubscriberTest {
 ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
 
 when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
-when(mySubscriptionDao.read(any(), any())).thenThrow(ResourceGoneException.class);
+Subscription deletedSubscription = buildSubscription();
+ResourceMetadataKeyEnum.DELETED_AT.put(deletedSubscription, InstantType.withCurrentTime());
+when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(deletedSubscription);
 
 mySubscriptionRegisteringSubscriber.handleMessage(message);
 verify(mySubscriptionRegistry, times(1)).unregisterSubscriptionIfRegistered(any());
@ -92,7 +104,7 @@ public class SubscriptionRegisteringSubscriberTest {
 ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
 
 when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
-when(mySubscriptionDao.read(any(), any())).thenReturn(mySubscription);
+when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(mySubscription);
 when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
 
 mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -106,7 +118,7 @@ public class SubscriptionRegisteringSubscriberTest {
 ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
 
 when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
-when(mySubscriptionDao.read(any(), any())).thenReturn(mySubscription);
+when(mySubscriptionDao.read(any(), any(), eq(true))).thenReturn(mySubscription);
 when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ERROR.toCode());
 
 mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -126,7 +138,7 @@ public class SubscriptionRegisteringSubscriberTest {
 ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
 
 when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
-when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture())).thenReturn(mySubscription);
+when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture(), eq(true))).thenReturn(mySubscription);
 when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
 
 mySubscriptionRegisteringSubscriber.handleMessage(message);
@ -147,7 +159,7 @@ public class SubscriptionRegisteringSubscriberTest {
 ResourceModifiedJsonMessage message = new ResourceModifiedJsonMessage(resourceModifiedMessage);
 
 when(myDaoRegistry.getResourceDao("Subscription")).thenReturn(mySubscriptionDao);
-when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture())).thenReturn(mySubscription);
+when(mySubscriptionDao.read(any(), requestDetailsCaptor.capture(), eq(true))).thenReturn(mySubscription);
 when(mySubscriptionCanonicalizer.getSubscriptionStatus(mySubscription)).thenReturn(SubscriptionStatus.ACTIVE.toCode());
 
 mySubscriptionRegisteringSubscriber.handleMessage(message);
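Two things change in the stubs above: the DAO read(...) call gains a third argument (eq(true), presumably a deleted-resources-allowed flag given the new fixture), and the "gone" case is now modelled by returning a Subscription that carries a DELETED_AT metadata entry rather than stubbing a thrown ResourceGoneException. A small sketch of building such a fixture, using only calls that appear in the diff (the helper class name is illustrative):

    import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
    import org.hl7.fhir.r4.model.InstantType;
    import org.hl7.fhir.r4.model.Subscription;

    class DeletedSubscriptionFixtureSketch {
        static Subscription deletedSubscription() {
            Subscription subscription = new Subscription();
            subscription.setId("Subscription/testrest");
            subscription.setStatus(Subscription.SubscriptionStatus.ACTIVE);
            // mark the resource as deleted "now"; consumers can then detect deletion
            // from the resource itself rather than from a thrown exception
            ResourceMetadataKeyEnum.DELETED_AT.put(subscription, InstantType.withCurrentTime());
            return subscription;
        }
    }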
@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.subscription.channel.subscription.IChannelNamer;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
 import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
 import ca.uhn.fhir.jpa.subscription.module.config.MockFhirClientSearchParamProvider;
+import ca.uhn.fhir.jpa.subscription.util.SubscriptionDebugLogInterceptor;
 import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.system.HapiSystemProperties;
 import org.hl7.fhir.instance.model.api.IBaseResource;
@ -35,6 +36,7 @@ import static org.mockito.Mockito.mock;
 BaseSubscriptionTest.MyConfig.class
 })
 public abstract class BaseSubscriptionTest {
+private static final SubscriptionDebugLogInterceptor ourSubscriptionDebugLogInterceptor = new SubscriptionDebugLogInterceptor();
 
 static {
 HapiSystemProperties.enableUnitTestMode();
@ -52,11 +54,13 @@ public abstract class BaseSubscriptionTest {
 @BeforeEach
 public void before() {
 mySearchParamRegistry.handleInit(Collections.emptyList());
+myInterceptorRegistry.registerInterceptor(ourSubscriptionDebugLogInterceptor);
 }
 
 @AfterEach
 public void afterClearAnonymousLambdas() {
 myInterceptorRegistry.unregisterAllInterceptors();
+myInterceptorRegistry.unregisterInterceptor(ourSubscriptionDebugLogInterceptor);
 }
 
 public void initSearchParamRegistry(IBaseResource theReadResource) {
@ -68,7 +72,7 @@ public abstract class BaseSubscriptionTest {
 public static class MyConfig {
 
 @Bean
-public JpaStorageSettings storageSettings() {
+public JpaStorageSettings jpaStorageSettings() {
 return new JpaStorageSettings();
 }
 
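The @Bean method rename above (storageSettings to jpaStorageSettings), together with the removal of the duplicate StorageSettings bean from TestSubscriptionConfig in the next hunks, presumably keeps the two bean names from clashing in the test application context. A hedged sketch of the resulting configuration (the @Configuration wrapper and the JpaStorageSettings import path are assumptions, since neither is shown in this diff):

    import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; // assumed import path, not shown in the diff
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    class StorageSettingsTestConfigSketch {
        // single storage-settings bean, registered under the explicit name "jpaStorageSettings"
        @Bean
        public JpaStorageSettings jpaStorageSettings() {
            return new JpaStorageSettings();
        }
    }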
@ -3,7 +3,6 @@ package ca.uhn.fhir.jpa.subscription.module.config;
 import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
 import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
-import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.match.matcher.matching.InMemorySubscriptionMatcher;
 import ca.uhn.fhir.rest.client.api.IGenericClient;
 import org.springframework.context.annotation.Bean;
@ -25,11 +24,6 @@ public class TestSubscriptionConfig {
 return new PartitionSettings();
 }
 
-@Bean
-public StorageSettings storageSettings() {
-return new StorageSettings();
-}
-
 @Bean
 public IGenericClient fhirClient() {
 return mock(IGenericClient.class);
@ -11,7 +11,6 @@ import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.ISubscriptionDeliveryChannelNamer;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
-import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
 import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
 import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscription;
 import ca.uhn.fhir.jpa.subscription.model.CanonicalSubscriptionChannelType;
@ -100,8 +99,6 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
 @Autowired
 IInterceptorService myInterceptorRegistry;
 @Autowired
-private SubscriptionLoader mySubscriptionLoader;
-@Autowired
 private ISubscriptionDeliveryChannelNamer mySubscriptionDeliveryChannelNamer;
 
 @BeforeEach
@ -131,6 +128,8 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
 mySubscriptionMatchingPost.clear();
 mySubscriptionActivatedPost.clear();
 ourObservationListener.clear();
+mySubscriptionResourceMatched.clear();
+mySubscriptionResourceNotMatched.clear();
 super.clearRegistry();
 }
 
@ -148,9 +147,11 @@ public abstract class BaseBlockingQueueSubscribableChannelDstu3Test extends Base
 }
 
 protected Subscription sendSubscription(Subscription theSubscription, RequestPartitionId theRequestPartitionId, Boolean mockDao) throws InterruptedException {
+mySubscriptionResourceNotMatched.setExpectedCount(1);
 mySubscriptionActivatedPost.setExpectedCount(1);
 Subscription retVal = sendResource(theSubscription, theRequestPartitionId);
 mySubscriptionActivatedPost.awaitExpected();
+mySubscriptionResourceNotMatched.awaitExpected();
 return retVal;
 }
 
@ -35,7 +35,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
 assertEquals(2, mySubscriptionRegistry.size());
 
 ourObservationListener.setExpectedCount(1);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
@ -58,7 +60,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
 assertEquals(2, mySubscriptionRegistry.size());
 
 ourObservationListener.setExpectedCount(1);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
@ -82,7 +86,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
 
 mySubscriptionAfterDelivery.setExpectedCount(1);
 ourObservationListener.setExpectedCount(0);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.clear();
 mySubscriptionAfterDelivery.awaitExpected();
 
@ -120,7 +126,9 @@ public class SubscriptionCheckingSubscriberTest extends BaseBlockingQueueSubscri
 
 observation.setStatus(Observation.ObservationStatus.FINAL);
 
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendResource(observation);
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
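The setExpectedCount()/awaitExpected() pairs added around sendObservation() and sendResource() in these tests follow a latch idiom: declare how many matched-resource callbacks are expected before triggering the work, then block until they have all arrived. The concrete latch class is not shown in this diff; the sketch below uses a plain CountDownLatch purely to illustrate the synchronization pattern.

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    class MatchLatchSketch {
        private volatile CountDownLatch myLatch = new CountDownLatch(0);

        // called by the test before triggering work, mirroring setExpectedCount(n)
        void setExpectedCount(int theCount) {
            myLatch = new CountDownLatch(theCount);
        }

        // called once per matched resource by the code under test
        void onResourceMatched() {
            myLatch.countDown();
        }

        // called by the test after triggering work, mirroring awaitExpected()
        void awaitExpected() throws InterruptedException {
            if (!myLatch.await(10, TimeUnit.SECONDS)) {
                throw new AssertionError("Expected count was not reached within 10 seconds");
            }
        }
    }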
@ -76,7 +76,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 assertEquals(2, mySubscriptionRegistry.size());
 
 ourObservationListener.setExpectedCount(1);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
@ -99,7 +101,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 assertEquals(2, mySubscriptionRegistry.size());
 
 ourObservationListener.setExpectedCount(1);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
@ -117,7 +121,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 
 assertEquals(1, mySubscriptionRegistry.size());
 ourObservationListener.setExpectedCount(1);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendResource(observation);
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(1, ourContentTypes.size());
@ -141,7 +147,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 
 mySubscriptionAfterDelivery.setExpectedCount(1);
 ourObservationListener.setExpectedCount(0);
+mySubscriptionResourceMatched.setExpectedCount(1);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.clear();
 mySubscriptionAfterDelivery.awaitExpected();
 
@ -168,7 +176,9 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 assertEquals(3, mySubscriptionRegistry.size());
 
 ourObservationListener.setExpectedCount(2);
+mySubscriptionResourceMatched.setExpectedCount(2);
 sendObservation(code, "SNOMED-CT");
+mySubscriptionResourceMatched.awaitExpected();
 ourObservationListener.awaitExpected();
 
 assertEquals(2, ourContentTypes.size());
@ -401,7 +411,7 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
 Subscription modifiedSubscription = subscription.copy();
 // the original partition info was the request info, but we need the actual storage partition.
 modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
-when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
+when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any(), eq(true))).thenReturn(modifiedSubscription);
 }
 
 @Nested
@ -0,0 +1,20 @@
+package ca.uhn.fhir.jpa.topic;
+
+import ca.uhn.fhir.context.FhirContext;
+import org.hl7.fhir.r4b.model.Enumerations;
+import org.hl7.fhir.r4b.model.SubscriptionTopic;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+class SubscriptionTopicCanonicalizerTest {
+@Test
+public void testCanonicalizeTopic() {
+SubscriptionTopic topic = new SubscriptionTopic();
+topic.setId("123");
+topic.setStatus(Enumerations.PublicationStatus.ACTIVE);
+org.hl7.fhir.r5.model.SubscriptionTopic canonicalized = SubscriptionTopicCanonicalizer.canonicalizeTopic(FhirContext.forR4BCached(), topic);
+assertEquals("123", canonicalized.getId());
+assertEquals(org.hl7.fhir.r5.model.Enumerations.PublicationStatus.ACTIVE, canonicalized.getStatus());
+}
+}
@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.7.0-SNAPSHOT</version>
+<version>6.7.1-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@ -104,7 +104,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
@ -693,7 +692,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 IBundleProvider history = myPatientDao.history(null, null, null, mySrd);
 assertEquals(4 + initialHistory, history.sizeOrThrowNpe());
 List<IBaseResource> resources = history.getResources(0, 4);
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) resources.get(0)));
+assertTrue(resources.get(0).isDeleted());
 
 try {
 myPatientDao.delete(id2, mySrd);
@ -796,10 +795,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 IBundleProvider history = myPatientDao.history(id, null, null, null, mySrd);
 assertEquals(2, history.size().intValue());
 
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)));
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)).getValue());
-assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(1, 2).get(0)));
+assertTrue(history.getResources(0, 1).get(0).isDeleted());
+assertFalse(history.getResources(1, 2).get(0).isDeleted());
 
 }
 
 @Test
@ -1206,13 +1203,13 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
 assertEquals(id.withVersion("2"), entries.get(1).getIdElement());
 assertEquals(id.withVersion("1"), entries.get(2).getIdElement());
 
-assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(0)));
+assertFalse(entries.get(0).isDeleted());
 assertEquals(BundleEntryTransactionMethodEnum.PUT, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(0)));
 
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(1)));
+assertTrue(entries.get(1).isDeleted());
 assertEquals(BundleEntryTransactionMethodEnum.DELETE, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(1)));
 
-assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) entries.get(2)));
+assertFalse(entries.get(2).isDeleted());
 assertEquals(BundleEntryTransactionMethodEnum.POST, ResourceMetadataKeyEnum.ENTRY_TRANSACTION_METHOD.get((IResource) entries.get(2)));
 }
 
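The assertion rewrites above (and in FhirSystemDaoDstu2Test below) replace direct inspection of the DELETED_AT metadata entry with a call to isDeleted() on the returned resource; per the replaced assertions, the two checks are equivalent for these history results. A small comparison sketch (the helper class is illustrative, and it assumes isDeleted() is available on IBaseResource, as the updated assertions use it on IBaseResource-typed lists):

    import ca.uhn.fhir.model.api.IResource;
    import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
    import org.hl7.fhir.instance.model.api.IBaseResource;

    class DeletedCheckSketch {
        // old style: look up the DELETED_AT entry in the resource metadata map
        static boolean isDeletedViaMetadata(IResource theResource) {
            return ResourceMetadataKeyEnum.DELETED_AT.get(theResource) != null;
        }

        // new style used by the updated assertions: ask the resource directly
        static boolean isDeletedViaAccessor(IBaseResource theResource) {
            return theResource.isDeleted();
        }
    }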
@ -5,7 +5,6 @@ import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
 import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
-import ca.uhn.fhir.model.api.IResource;
 import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
 import ca.uhn.fhir.model.api.TagList;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
@ -63,6 +62,7 @@ import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.startsWith;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertNull;
@ -739,10 +739,8 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
 IBundleProvider history = myPatientDao.history(id, null, null, null, mySrd);
 assertEquals(2, history.size().intValue());
 
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)));
-assertNotNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(0, 1).get(0)).getValue());
-assertNull(ResourceMetadataKeyEnum.DELETED_AT.get((IResource) history.getResources(1, 2).get(0)));
+assertTrue(history.getResources(0, 1).get(0).isDeleted());
+assertFalse(history.getResources(1, 2).get(0).isDeleted());
 
 }
 
 @Test
@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.7.0-SNAPSHOT</version>
+<version>6.7.1-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user