Merge branch 'master' into jm-rehoming-operation

juan.marchionatto 2024-11-15 10:17:05 -05:00
commit 41925cbbf7
472 changed files with 12092 additions and 23079 deletions

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -33,6 +33,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;

 public abstract class BaseRuntimeElementDefinition<T extends IBase> {
@@ -40,7 +41,7 @@ public abstract class BaseRuntimeElementDefinition<T extends IBase> {
 	private final Class<? extends T> myImplementingClass;
 	private final String myName;
 	private final boolean myStandardType;
-	private Map<Class<?>, Constructor<T>> myConstructors = Collections.synchronizedMap(new HashMap<>());
+	private final Map<Class<?>, Constructor<T>> myConstructors = new ConcurrentHashMap<>();
 	private List<RuntimeChildDeclaredExtensionDefinition> myExtensions = new ArrayList<>();
 	private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsModifier = new ArrayList<>();
 	private List<RuntimeChildDeclaredExtensionDefinition> myExtensionsNonModifier = new ArrayList<>();
@@ -84,27 +85,24 @@ public abstract class BaseRuntimeElementDefinition<T extends IBase> {
 			argumentType = theArgument.getClass();
 		}

-		Constructor<T> retVal = myConstructors.get(argumentType);
-		if (retVal == null) {
+		Constructor<T> retVal = myConstructors.computeIfAbsent(argumentType, type -> {
 			for (Constructor<?> next : getImplementingClass().getConstructors()) {
-				if (argumentType == VOID_CLASS) {
+				if (type == VOID_CLASS) {
 					if (next.getParameterTypes().length == 0) {
-						retVal = (Constructor<T>) next;
-						break;
+						return (Constructor<T>) next;
 					}
-				} else if (next.getParameterTypes().length == 1) {
-					if (next.getParameterTypes()[0].isAssignableFrom(argumentType)) {
-						retVal = (Constructor<T>) next;
-						break;
-					}
+				} else if (next.getParameterTypes().length == 1 && next.getParameterTypes()[0].isAssignableFrom(type)) {
+					return (Constructor<T>) next;
 				}
 			}
-			if (retVal == null) {
-				throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass()
-						+ " has no constructor with a single argument of type " + argumentType);
-			}
-			myConstructors.put(argumentType, retVal);
+			return null;
+		});
+
+		if (retVal == null) {
+			throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass()
+					+ " has no constructor with a single argument of type " + argumentType);
 		}
 		return retVal;
 	}

View File

@@ -60,6 +60,7 @@ public class RuntimeSearchParam {
 	private final List<Component> myComponents;
 	private final IIdType myIdUnqualifiedVersionless;
 	private IPhoneticEncoder myPhoneticEncoder;
+	private boolean myEnabledForSearching = true;

 	/**
 	 * Constructor
@@ -166,6 +167,24 @@ public class RuntimeSearchParam {
 		}
 	}

+	/**
+	 * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+	 * generating indexes, which might be desired while the search parameter is still being indexed). This
+	 * setting defaults to {@literal true} if it isn't set otherwise.
+	 */
+	public boolean isEnabledForSearching() {
+		return myEnabledForSearching;
+	}
+
+	/**
+	 * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+	 * generating indexes, which might be desired while the search parameter is still being indexed). This
+	 * setting defaults to {@literal true} if it isn't set otherwise.
+	 */
+	public void setEnabledForSearching(boolean theEnabledForSearching) {
+		myEnabledForSearching = theEnabledForSearching;
+	}
+
 	public List<Component> getComponents() {
 		return myComponents;
 	}
@@ -361,13 +380,6 @@ public class RuntimeSearchParam {
 		return !myUpliftRefchains.isEmpty();
 	}

-	public enum RuntimeSearchParamStatusEnum {
-		ACTIVE,
-		DRAFT,
-		RETIRED,
-		UNKNOWN
-	}
-
 	/**
 	 * This method tests whether a given FHIRPath expression <i>could</i>
 	 * possibly apply to the given resource type.
@@ -413,6 +425,13 @@ public class RuntimeSearchParam {
 		return false;
 	}

+	public enum RuntimeSearchParamStatusEnum {
+		ACTIVE,
+		DRAFT,
+		RETIRED,
+		UNKNOWN
+	}
+
 	public static class Component {
 		private final String myExpression;
 		private final String myReference;

View File

@@ -727,7 +727,7 @@ public interface IValidationSupport {
 			return this;
 		}

-		String getCodeSystemName() {
+		public String getCodeSystemName() {
 			return myCodeSystemName;
 		}

View File

@@ -2174,6 +2174,32 @@ public enum Pointcut implements IPointcut {
 			"ca.uhn.fhir.rest.api.server.RequestDetails",
 			"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"),

+	/**
+	 * <b>Storage Hook:</b>
+	 * Invoked when a partition has been deleted, typically meaning the <code>$partition-management-delete-partition</code>
+	 * operation has been invoked.
+	 * <p>
+	 * This hook will only be called if partitioning is enabled in the JPA server.
+	 * </p>
+	 * <p>
+	 * Hooks may accept the following parameters:
+	 * </p>
+	 * <ul>
+	 * <li>
+	 * ca.uhn.fhir.interceptor.model.RequestPartitionId - The ID of the partition that was deleted.
+	 * </li>
+	 * </ul>
+	 * <p>
+	 * Hooks must return void.
+	 * </p>
+	 */
+	STORAGE_PARTITION_DELETED(
+			// Return type
+			void.class,
+			// Params
+			"ca.uhn.fhir.interceptor.model.RequestPartitionId"),
+
 	/**
 	 * <b>Storage Hook:</b>
 	 * Invoked before any partition aware FHIR operation, when the selected partition has been identified (ie. after the
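A minimal sketch of an interceptor subscribing to the new pointcut (the class and method names are illustrative; the parameter type comes from the javadoc above):

```java
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;

@Interceptor
public class PartitionDeletionListener {

	// Called after $partition-management-delete-partition removes a partition
	@Hook(Pointcut.STORAGE_PARTITION_DELETED)
	public void partitionDeleted(RequestPartitionId thePartitionId) {
		// e.g. evict caches or audit entries keyed by the deleted partition
	}
}
```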

View File

@@ -115,6 +115,7 @@ public class Constants {
 	public static final String HEADER_AUTHORIZATION_VALPREFIX_BASIC = "Basic ";
 	public static final String HEADER_AUTHORIZATION_VALPREFIX_BEARER = "Bearer ";
 	public static final String HEADER_CACHE_CONTROL = "Cache-Control";
+	public static final String HEADER_CLIENT_TIMEZONE = "Timezone";
 	public static final String HEADER_CONTENT_DISPOSITION = "Content-Disposition";
 	public static final String HEADER_CONTENT_ENCODING = "Content-Encoding";
 	public static final String HEADER_CONTENT_LOCATION = "Content-Location";

View File

@@ -112,6 +112,8 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
 					theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString())
 							.getRight());
 				}
+				// there is only one value; we will set it as the lower bound
+				// as a >= operation
 				validateAndSet(theDateParam, null);
 				break;
 			case ENDS_BEFORE:
@@ -121,6 +123,9 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
 					theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString())
 							.getLeft());
 				}
+
+				// there is only one value; we will set it as the upper bound
+				// as a <= operation
 				validateAndSet(null, theDateParam);
 				break;
 			default:
@@ -318,8 +323,8 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
 			case NOT_EQUAL:
 				break;
 			case LESSTHAN:
-			case APPROXIMATE:
 			case LESSTHAN_OR_EQUALS:
+			case APPROXIMATE:
 			case ENDS_BEFORE:
 				throw new IllegalStateException(
 						Msg.code(1926) + "Invalid lower bound comparator: " + myLowerBound.getPrefix());
@@ -383,9 +388,9 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
 			case NOT_EQUAL:
 			case GREATERTHAN_OR_EQUALS:
 				break;
-			case LESSTHAN_OR_EQUALS:
 			case LESSTHAN:
 			case APPROXIMATE:
+			case LESSTHAN_OR_EQUALS:
 			case ENDS_BEFORE:
 				throw new IllegalStateException(
 						Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix());
@@ -470,10 +475,13 @@ public class DateRangeParam implements IQueryParameterAnd<DateParam> {
 		if (myLowerBound != null && myLowerBound.getMissing() != null) {
 			retVal.add((myLowerBound));
 		} else {
-			if (myLowerBound != null && !myLowerBound.isEmpty()) {
+			boolean hasLowerBound = myLowerBound != null && !myLowerBound.isEmpty();
+			boolean hasUpperBound = myUpperBound != null && !myUpperBound.isEmpty();
+
+			if (hasLowerBound) {
 				retVal.add((myLowerBound));
 			}
-			if (myUpperBound != null && !myUpperBound.isEmpty()) {
+			if (hasUpperBound) {
 				retVal.add((myUpperBound));
 			}
 		}
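A small sketch of the single-value semantics described in the new comments, assuming the standard `DateParam` prefix parsing:

```java
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;

public class DateRangeParamExample {
	public static void main(String[] args) {
		// A single ge-prefixed value becomes the lower bound (a >= operation);
		// the upper bound is left unset
		DateRangeParam range = new DateRangeParam(new DateParam("ge2024-01-01"));
		System.out.println(range.getLowerBound().getValueAsString()); // 2024-01-01
		System.out.println(range.getUpperBound()); // null
	}
}
```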

View File

@@ -20,6 +20,7 @@
 package ca.uhn.fhir.util;

 import ca.uhn.fhir.i18n.Msg;
+import com.google.common.base.Preconditions;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
@@ -28,11 +29,20 @@ import java.lang.ref.SoftReference;
 import java.text.ParseException;
 import java.text.ParsePosition;
 import java.text.SimpleDateFormat;
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.YearMonth;
+import java.time.format.DateTimeFormatter;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+import java.time.temporal.TemporalField;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
 import java.util.TimeZone;

 /**
@@ -93,6 +103,89 @@ public final class DateUtils {
 	 */
 	private DateUtils() {}

+	/**
+	 * Calculate a LocalDateTime with any missing date/time data points defaulting to the earliest values (ex 0 for hour)
+	 * from a TemporalAccessor or empty if it doesn't contain a year.
+	 *
+	 * @param theTemporalAccessor The TemporalAccessor containing date/time information
+	 * @return A LocalDateTime or empty
+	 */
+	public static Optional<LocalDateTime> extractLocalDateTimeForRangeStartOrEmpty(
+			TemporalAccessor theTemporalAccessor) {
+		if (theTemporalAccessor.isSupported(ChronoField.YEAR)) {
+			final int year = theTemporalAccessor.get(ChronoField.YEAR);
+			final Month month = Month.of(getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MONTH_OF_YEAR, 1));
+			final int day = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.DAY_OF_MONTH, 1);
+			final int hour = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.HOUR_OF_DAY, 0);
+			final int minute = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MINUTE_OF_HOUR, 0);
+			final int seconds = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.SECOND_OF_MINUTE, 0);
+
+			return Optional.of(LocalDateTime.of(year, month, day, hour, minute, seconds));
+		}
+
+		return Optional.empty();
+	}
+
+	/**
+	 * Calculate a LocalDateTime with any missing date/time data points defaulting to the latest values (ex 23 for hour)
+	 * from a TemporalAccessor or empty if it doesn't contain a year.
+	 *
+	 * @param theTemporalAccessor The TemporalAccessor containing date/time information
+	 * @return A LocalDateTime or empty
+	 */
+	public static Optional<LocalDateTime> extractLocalDateTimeForRangeEndOrEmpty(TemporalAccessor theTemporalAccessor) {
+		if (theTemporalAccessor.isSupported(ChronoField.YEAR)) {
+			final int year = theTemporalAccessor.get(ChronoField.YEAR);
+			final Month month = Month.of(getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MONTH_OF_YEAR, 12));
+			final int day = getTimeUnitIfSupported(
+					theTemporalAccessor,
+					ChronoField.DAY_OF_MONTH,
+					YearMonth.of(year, month).atEndOfMonth().getDayOfMonth());
+			final int hour = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.HOUR_OF_DAY, 23);
+			final int minute = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MINUTE_OF_HOUR, 59);
+			final int seconds = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.SECOND_OF_MINUTE, 59);
+
+			return Optional.of(LocalDateTime.of(year, month, day, hour, minute, seconds));
+		}

+		return Optional.empty();
+	}
+
+	/**
+	 * With the provided DateTimeFormatter, parse a date time String or return empty if the String doesn't correspond
+	 * to the formatter.
+	 *
+	 * @param theDateTimeString A date/time String in some date format
+	 * @param theSupportedDateTimeFormatter The DateTimeFormatter we expect corresponds to the String
+	 * @return The parsed TemporalAccessor or empty
+	 */
+	public static Optional<TemporalAccessor> parseDateTimeStringIfValid(
+			String theDateTimeString, DateTimeFormatter theSupportedDateTimeFormatter) {
+		Objects.requireNonNull(theSupportedDateTimeFormatter);
+		Preconditions.checkArgument(StringUtils.isNotBlank(theDateTimeString));
+
+		try {
+			return Optional.of(theSupportedDateTimeFormatter.parse(theDateTimeString));
+		} catch (Exception exception) {
+			return Optional.empty();
+		}
+	}
+
+	private static int getTimeUnitIfSupported(
+			TemporalAccessor theTemporalAccessor, TemporalField theTemporalField, int theDefaultValue) {
+		return getTimeUnitIfSupportedOrEmpty(theTemporalAccessor, theTemporalField)
+				.orElse(theDefaultValue);
+	}
+
+	private static Optional<Integer> getTimeUnitIfSupportedOrEmpty(
+			TemporalAccessor theTemporalAccessor, TemporalField theTemporalField) {
+		if (theTemporalAccessor.isSupported(theTemporalField)) {
+			return Optional.of(theTemporalAccessor.get(theTemporalField));
+		}
+
+		return Optional.empty();
+	}
+
 	/**
 	 * A factory for {@link SimpleDateFormat}s. The instances are stored in a
 	 * threadlocal way because SimpleDateFormat is not thread safe as noted in
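A brief usage sketch of the new helpers, mirroring the expectations in the test file further below (the formatter pattern is an assumption matching that test):

```java
import ca.uhn.fhir.util.DateUtils;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.Optional;

public class DateUtilsUsageExample {
	public static void main(String[] args) {
		// Parse a year-month string against the formatter we expect it to match
		Optional<TemporalAccessor> parsed = DateUtils.parseDateTimeStringIfValid(
				"2024-02", DateTimeFormatter.ofPattern("yyyy-MM"));

		// Missing fields default to the latest values for a range end, so
		// February 2024 (a leap year) expands to the end of the month
		Optional<LocalDateTime> end =
				parsed.flatMap(DateUtils::extractLocalDateTimeForRangeEndOrEmpty);
		System.out.println(end.orElse(null)); // 2024-02-29T23:59:59
	}
}
```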

View File

@@ -110,6 +110,12 @@ public class HapiExtensions {
 	public static final String EXT_SP_UNIQUE = "http://hapifhir.io/fhir/StructureDefinition/sp-unique";

+	/**
+	 * URL for extension on a Search Parameter which determines whether it should be enabled for searching for resources
+	 */
+	public static final String EXT_SEARCHPARAM_ENABLED_FOR_SEARCHING =
+			"http://hapifhir.io/fhir/StructureDefinition/searchparameter-enabled-for-searching";
+
 	/**
 	 * URL for extension on a Phonetic String SearchParameter indicating that text values should be phonetically indexed with the named encoder
 	 */
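A hedged sketch of how the new extension might be applied when creating a custom SearchParameter (the parameter itself is hypothetical; the flag can be flipped to `true` once indexing completes):

```java
import ca.uhn.fhir.util.HapiExtensions;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.SearchParameter;

public class IndexOnlySearchParameterExample {
	public static SearchParameter buildIndexOnlySearchParameter() {
		SearchParameter sp = new SearchParameter();
		sp.setCode("eyecolour");
		sp.addBase("Patient");
		sp.setType(Enumerations.SearchParamType.TOKEN);
		sp.setExpression("Patient.extension('http://example.org/eyecolour')");
		sp.setStatus(Enumerations.PublicationStatus.ACTIVE);

		// Index the parameter, but keep it out of searches for now
		sp.addExtension()
				.setUrl(HapiExtensions.EXT_SEARCHPARAM_ENABLED_FOR_SEARCHING)
				.setValue(new BooleanType(false));
		return sp;
	}
}
```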

View File

@@ -34,6 +34,7 @@ import com.fasterxml.jackson.databind.ser.FilterProvider;
 import com.fasterxml.jackson.databind.ser.PropertyWriter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 import jakarta.annotation.Nonnull;

 import java.io.IOException;
@@ -60,16 +61,22 @@ public class JsonUtil {
 		ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
 		ourMapperPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
 		ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);
+		// Needed to handle ZonedDateTime
+		ourMapperPrettyPrint.registerModule(new JavaTimeModule());

 		ourMapperNonPrettyPrint = new ObjectMapper();
 		ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
 		ourMapperNonPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
 		ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
+		// Needed to handle ZonedDateTime
+		ourMapperNonPrettyPrint.registerModule(new JavaTimeModule());

 		ourMapperIncludeSensitive = new ObjectMapper();
 		ourMapperIncludeSensitive.setFilterProvider(SHOW_ALL_DATA_FILTER_PROVIDER);
 		ourMapperIncludeSensitive.setSerializationInclusion(JsonInclude.Include.NON_NULL);
 		ourMapperIncludeSensitive.disable(SerializationFeature.INDENT_OUTPUT);
+		// Needed to handle ZonedDateTime
+		ourMapperIncludeSensitive.registerModule(new JavaTimeModule());
 	}

 	/**
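A minimal round-trip sketch, assuming the existing `JsonUtil.serialize`/`deserialize` helpers and a hypothetical model class with a `java.time` field:

```java
import ca.uhn.fhir.model.api.IModelJson;
import ca.uhn.fhir.util.JsonUtil;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.ZonedDateTime;

public class JavaTimeRoundTripExample {

	// Hypothetical model class: serializable now that JavaTimeModule is registered
	public static class StampedMessage implements IModelJson {
		@JsonProperty("stamp")
		public ZonedDateTime myStamp = ZonedDateTime.now();
	}

	public static void main(String[] args) {
		String json = JsonUtil.serialize(new StampedMessage(), false);
		StampedMessage roundTripped = JsonUtil.deserialize(json, StampedMessage.class);
		System.out.println(json);
	}
}
```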

View File

@@ -43,6 +43,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -58,20 +59,20 @@ public class ParametersUtil {
 	public static Optional<String> getNamedParameterValueAsString(
 			FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
 		Function<IPrimitiveType<?>, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
-		return extractNamedParameters(theCtx, theParameters, theParameterName, mapper).stream()
+		return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper).stream()
 				.findFirst();
 	}

 	public static List<String> getNamedParameterValuesAsString(
 			FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
 		Function<IPrimitiveType<?>, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
-		return extractNamedParameters(theCtx, theParameters, theParameterName, mapper);
+		return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper);
 	}

 	public static List<Integer> getNamedParameterValuesAsInteger(
 			FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
 		Function<IPrimitiveType<?>, Integer> mapper = t -> (Integer) t.getValue();
-		return extractNamedParameters(theCtx, theParameters, theParameterName, mapper);
+		return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper);
 	}

 	public static Optional<Integer> getNamedParameterValueAsInteger(
@@ -80,6 +81,19 @@ public class ParametersUtil {
 				.findFirst();
 	}

+	/**
+	 * Returns the resource within a parameter.
+	 *
+	 * @param theCtx the FHIR context
+	 * @param theParameters the parameters instance where to look for the resource
+	 * @param theParameterName the parameter name
+	 * @return the resource
+	 */
+	public static Optional<IBaseResource> getNamedParameterResource(
+			FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
+		return extractNamedParameterResources(theCtx, theParameters, theParameterName).stream()
+				.findFirst();
+	}
+
 	public static Optional<IBase> getNamedParameter(
 			FhirContext theCtx, IBaseResource theParameters, String theParameterName) {
 		return getNamedParameters(theCtx, theParameters, theParameterName).stream()
@@ -153,7 +167,7 @@ public class ParametersUtil {
 				.map(t -> (Integer) t);
 	}

-	private static <T> List<T> extractNamedParameters(
+	private static <T> List<T> extractNamedParameterValues(
 			FhirContext theCtx,
 			IBaseParameters theParameters,
 			String theParameterName,
@@ -170,7 +184,25 @@ public class ParametersUtil {
 					.filter(t -> t instanceof IPrimitiveType<?>)
 					.map(t -> ((IPrimitiveType<?>) t))
 					.map(theMapper)
-					.filter(t -> t != null)
+					.filter(Objects::nonNull)
+					.forEach(retVal::add);
+		}
+		return retVal;
+	}
+
+	private static List<IBaseResource> extractNamedParameterResources(
+			FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
+		List<IBaseResource> retVal = new ArrayList<>();
+		List<IBase> namedParameters = getNamedParameters(theCtx, theParameters, theParameterName);
+		for (IBase nextParameter : namedParameters) {
+			BaseRuntimeElementCompositeDefinition<?> nextParameterDef =
+					(BaseRuntimeElementCompositeDefinition<?>) theCtx.getElementDefinition(nextParameter.getClass());
+			BaseRuntimeChildDefinition resourceChild = nextParameterDef.getChildByName("resource");
+			List<IBase> resourceValues = resourceChild.getAccessor().getValues(nextParameter);
+			resourceValues.stream()
+					.filter(IBaseResource.class::isInstance)
+					.map(t -> ((IBaseResource) t))
 					.forEach(retVal::add);
 		}
 		return retVal;
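A short usage sketch for the new helper (the resource and parameter names are illustrative):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.ParametersUtil;
import java.util.Optional;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;

public class NamedParameterResourceExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		// A Parameters resource with a resource-valued part named "resource"
		Parameters parameters = new Parameters();
		parameters.addParameter().setName("resource").setResource(new Patient());

		// The new helper pulls the first matching resource out of the named parameter
		Optional<IBaseResource> resource =
				ParametersUtil.getNamedParameterResource(ctx, parameters, "resource");
		System.out.println(resource.isPresent()); // true
	}
}
```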

View File

@@ -26,10 +26,12 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.i18n.Msg;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.tuple.Triple;
 import org.hl7.fhir.instance.model.api.IBase;
 import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
+import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;
 import org.slf4j.Logger;
@@ -93,6 +95,8 @@ public final class TerserUtil {
 	private static final Logger ourLog = getLogger(TerserUtil.class);
 	private static final String EQUALS_DEEP = "equalsDeep";

+	public static final String DATA_ABSENT_REASON_EXTENSION_URI =
+			"http://hl7.org/fhir/StructureDefinition/data-absent-reason";
+
 	private TerserUtil() {}
@@ -266,6 +270,15 @@ public final class TerserUtil {
 		return theItems.stream().anyMatch(i -> equals(i, theItem, method));
 	}

+	private static boolean hasDataAbsentReason(IBase theItem) {
+		if (theItem instanceof IBaseHasExtensions) {
+			IBaseHasExtensions hasExtensions = (IBaseHasExtensions) theItem;
+			return hasExtensions.getExtension().stream()
+					.anyMatch(t -> StringUtils.equals(t.getUrl(), DATA_ABSENT_REASON_EXTENSION_URI));
+		}
+		return false;
+	}
+
 	/**
 	 * Merges all fields on the provided instance. <code>theTo</code> will contain a union of all values from <code>theFrom</code>
 	 * instance and <code>theTo</code> instance.
@@ -695,24 +708,36 @@ public final class TerserUtil {
 			BaseRuntimeChildDefinition childDefinition,
 			List<IBase> theFromFieldValues,
 			List<IBase> theToFieldValues) {
-		for (IBase theFromFieldValue : theFromFieldValues) {
-			if (contains(theFromFieldValue, theToFieldValues)) {
+		if (!theFromFieldValues.isEmpty() && theToFieldValues.stream().anyMatch(TerserUtil::hasDataAbsentReason)) {
+			// If the to resource has a data absent reason, and there is potentially real data incoming
+			// in the from resource, we should clear the data absent reason because it won't be absent anymore.
+			theToFieldValues = removeDataAbsentReason(theTo, childDefinition, theToFieldValues);
+		}
+
+		for (IBase fromFieldValue : theFromFieldValues) {
+			if (contains(fromFieldValue, theToFieldValues)) {
 				continue;
 			}

-			IBase newFieldValue = newElement(theTerser, childDefinition, theFromFieldValue, null);
-			if (theFromFieldValue instanceof IPrimitiveType) {
+			if (hasDataAbsentReason(fromFieldValue) && !theToFieldValues.isEmpty()) {
+				// if the from field value asserts a reason the field isn't populated, but the to field is populated,
+				// we don't want to overwrite real data with the extension
+				continue;
+			}
+
+			IBase newFieldValue = newElement(theTerser, childDefinition, fromFieldValue, null);
+			if (fromFieldValue instanceof IPrimitiveType) {
 				try {
-					Method copyMethod = getMethod(theFromFieldValue, "copy");
+					Method copyMethod = getMethod(fromFieldValue, "copy");
 					if (copyMethod != null) {
-						newFieldValue = (IBase) copyMethod.invoke(theFromFieldValue, new Object[] {});
+						newFieldValue = (IBase) copyMethod.invoke(fromFieldValue, new Object[] {});
 					}
 				} catch (Throwable t) {
-					((IPrimitiveType) newFieldValue)
-							.setValueAsString(((IPrimitiveType) theFromFieldValue).getValueAsString());
+					((IPrimitiveType<?>) newFieldValue)
+							.setValueAsString(((IPrimitiveType<?>) fromFieldValue).getValueAsString());
 				}
 			} else {
-				theTerser.cloneInto(theFromFieldValue, newFieldValue, true);
+				theTerser.cloneInto(fromFieldValue, newFieldValue, true);
 			}

 			try {
@@ -724,6 +749,21 @@ public final class TerserUtil {
 		}
 	}

+	private static List<IBase> removeDataAbsentReason(
+			IBaseResource theResource, BaseRuntimeChildDefinition theFieldDefinition, List<IBase> theFieldValues) {
+		for (int i = 0; i < theFieldValues.size(); i++) {
+			if (hasDataAbsentReason(theFieldValues.get(i))) {
+				try {
+					theFieldDefinition.getMutator().remove(theResource, i);
+				} catch (UnsupportedOperationException e) {
+					// the field must be single-valued, just clear it
+					theFieldDefinition.getMutator().setValue(theResource, null);
+				}
+			}
+		}
+		return theFieldDefinition.getAccessor().getValues(theResource);
+	}
+
 	/**
 	 * Clones the specified resource.
 	 *
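A hedged sketch of the resulting merge behaviour (resource contents are illustrative; see also the changelog entry for issue 6370 below):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.TerserUtil;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Patient;

public class DataAbsentReasonMergeExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		// "from" only asserts a reason why the name is absent
		Patient from = new Patient();
		from.addName().addExtension(
				TerserUtil.DATA_ABSENT_REASON_EXTENSION_URI, new CodeType("unknown"));

		// "to" carries real data in the same field
		Patient to = new Patient();
		to.addName().setFamily("Smith");

		// Real data wins: the extension-only value is not copied over
		TerserUtil.mergeAllFields(ctx, from, to);
		System.out.println(to.getNameFirstRep().getFamily()); // Smith
	}
}
```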

View File

@@ -162,9 +162,16 @@ public enum VersionEnum {
 	V7_3_0,
 	V7_4_0,
+	V7_4_1,
+	V7_4_2,
+	V7_4_3,
+	V7_4_4,
+	V7_4_5,
+
 	V7_5_0,
-	V7_6_0;
+	V7_6_0,
+	V7_7_0,
+	V7_8_0;

 	public static VersionEnum latestVersion() {
 		VersionEnum[] values = VersionEnum.values();

View File

@@ -133,6 +133,7 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulTimingSuffix=Took {0}ms.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceNotExisting=Not deleted, resource {0} does not exist.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceAlreadyDeleted=Not deleted, resource {0} was already deleted.
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameterNotEnabledForSearch=Search parameter "{0}" for resource type "{1}" is not active for searching. Valid search parameters for this search are: {2}
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameterTooManyChains=Invalid _sort expression, can not chain more than once in a sort expression: {0}

View File

@@ -0,0 +1,179 @@
package ca.uhn.fhir.util;
import jakarta.annotation.Nullable;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.time.LocalDateTime;
import java.time.Month;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.Optional;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
class DateUtilsTest {
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY = DateTimeFormatter.ofPattern("yyyy");
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM = DateTimeFormatter.ofPattern("yyyy-MM");
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD = DateTimeFormatter.ISO_DATE;
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH");
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm");
private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM_SS = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
private static Stream<Arguments> extractLocalDateTimeStartIfValidParams() {
return Stream.of(
Arguments.of(
getTemporalAccessor("2024"),
LocalDateTime.of(2024, Month.JANUARY, 1, 0, 0, 0)
),
Arguments.of(
getTemporalAccessor("2023-02"),
LocalDateTime.of(2023, Month.FEBRUARY, 1, 0, 0, 0)
),
Arguments.of(
getTemporalAccessor("2022-09"),
LocalDateTime.of(2022, Month.SEPTEMBER, 1, 0, 0, 0)
),
Arguments.of(
getTemporalAccessor("2021-03-24"),
LocalDateTime.of(2021, Month.MARCH, 24, 0, 0, 0)
),
Arguments.of(
getTemporalAccessor("2024-10-23"),
LocalDateTime.of(2024, Month.OCTOBER, 23, 0, 0, 0)
),
Arguments.of(
getTemporalAccessor("2024-08-24T12"),
LocalDateTime.of(2024, Month.AUGUST, 24, 12, 0, 0)
),
Arguments.of(
getTemporalAccessor("2024-11-24T12:35"),
LocalDateTime.of(2024, Month.NOVEMBER, 24, 12, 35, 0)
),
Arguments.of(
getTemporalAccessor("2024-09-24T12:35:47"),
LocalDateTime.of(2024, Month.SEPTEMBER, 24, 12, 35, 47)
)
);
}
private static Stream<Arguments> extractLocalDateTimeEndIfValidParams() {
return Stream.of(
Arguments.of(
getTemporalAccessor("2024"),
LocalDateTime.of(2024, Month.DECEMBER, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-01"),
LocalDateTime.of(2023, Month.JANUARY, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-02"),
LocalDateTime.of(2023, Month.FEBRUARY, 28, 23, 59, 59)
),
// Leap year
Arguments.of(
getTemporalAccessor("2024-02"),
LocalDateTime.of(2024, Month.FEBRUARY, 29, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-03"),
LocalDateTime.of(2023, Month.MARCH, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-04"),
LocalDateTime.of(2023, Month.APRIL, 30, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-05"),
LocalDateTime.of(2023, Month.MAY, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-06"),
LocalDateTime.of(2023, Month.JUNE, 30, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-07"),
LocalDateTime.of(2023, Month.JULY, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-08"),
LocalDateTime.of(2023, Month.AUGUST, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2023-09"),
LocalDateTime.of(2023, Month.SEPTEMBER, 30, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2022-10"),
LocalDateTime.of(2022, Month.OCTOBER, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2022-11"),
LocalDateTime.of(2022, Month.NOVEMBER, 30, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2022-12"),
LocalDateTime.of(2022, Month.DECEMBER, 31, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2021-03-24"),
LocalDateTime.of(2021, Month.MARCH, 24, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2024-10-23"),
LocalDateTime.of(2024, Month.OCTOBER, 23, 23, 59, 59)
),
Arguments.of(
getTemporalAccessor("2024-09-24T12:35:47"),
LocalDateTime.of(2024, Month.SEPTEMBER, 24, 12, 35, 47)
)
);
}
@ParameterizedTest
@MethodSource("extractLocalDateTimeStartIfValidParams")
void extractLocalDateTimeStartIfValid (
TemporalAccessor theTemporalAccessor,
@Nullable LocalDateTime theExpectedResult) {
assertThat(DateUtils.extractLocalDateTimeForRangeStartOrEmpty(theTemporalAccessor))
.isEqualTo(Optional.ofNullable(theExpectedResult));
}
@ParameterizedTest
@MethodSource("extractLocalDateTimeEndIfValidParams")
void extractLocalDateTimeEndIfValid (
TemporalAccessor theTemporalAccessor,
@Nullable LocalDateTime theExpectedResult) {
assertThat(DateUtils.extractLocalDateTimeForRangeEndOrEmpty(theTemporalAccessor))
.isEqualTo(Optional.ofNullable(theExpectedResult));
}
private static TemporalAccessor getTemporalAccessor(String theDateTimeString) {
final DateTimeFormatter dateTimeFormatter = getDateTimeFormatter(theDateTimeString);
assertThat(dateTimeFormatter)
.withFailMessage("Cannot find DateTimeFormatter for: " + theDateTimeString)
.isNotNull();
return DateUtils.parseDateTimeStringIfValid(
theDateTimeString,
dateTimeFormatter
).orElseThrow(() -> new IllegalArgumentException("Unable to parse: " + theDateTimeString));
}
private static DateTimeFormatter getDateTimeFormatter(String theDateTimeString) {
return switch (theDateTimeString.length()) {
case 4 -> DATE_TIME_FORMATTER_YYYY;
case 7 -> DATE_TIME_FORMATTER_YYYY_MM;
case 10 -> DATE_TIME_FORMATTER_YYYY_MM_DD;
case 13 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH;
case 16 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM;
case 19 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM_SS;
default -> null;
};
}
}

View File

@@ -77,5 +77,4 @@ public class TaskChunkerTest {
 			Arguments.of(List.of(1,2,3,4,5,6,7,8,9), List.of(List.of(1,2,3), List.of(4,5,6), List.of(7,8,9)))
 		);
 	}
-
 }

View File

@@ -4,7 +4,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>7.5.1-SNAPSHOT</version>
+	<version>7.7.3-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>
@@ -12,7 +12,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -64,4 +64,12 @@
 			<property name="format" value="^(Base|Abstract).+$"/>
 		</module>
 	</module>
+
+	<!-- for suppression of rules; to use, surround code to exclude with comments: -->
+	<!-- CHECKSTYLE.OFF RuleToDisable AND CHECKSTYLE.ON RuleToDisable -->
+	<module name="SuppressWithPlainTextCommentFilter">
+		<property name="offCommentFormat" value="CHECKSTYLE.OFF\: ([\w\|]+)" />
+		<property name="onCommentFormat" value="CHECKSTYLE.ON\: ([\w\|]+)" />
+	</module>
+
 </module>
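A sketch of the suppression comments the new filter recognizes (the rule name is illustrative):

```java
public class LegacyCode {
	// CHECKSTYLE.OFF: MethodName
	public void Legacy_Method_Name() {
		// the MethodName check is suppressed until the ON comment below
	}
	// CHECKSTYLE.ON: MethodName
}
```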

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -1357,7 +1357,9 @@ public class GenericOkHttpClientDstu2Test {
 				.returnBundle(Bundle.class)
 				.execute();

-		assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri);
+		assertThat(MY_SERVLET.ourRequestUri).isIn(
+				ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+				ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");

 		// assertThat(MY_SERVLET.ourRequestUri,
 		// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() +
@@ -1391,7 +1393,10 @@ public class GenericOkHttpClientDstu2Test {
 				.execute();

 		assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
-		assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+		assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
+				s -> assertThat(s).contains("_elements=identifier%2Cname"),
+				s -> assertThat(s).contains("_elements=name%2Cidentifier")
+		);
 		assertThat(MY_SERVLET.ourRequestUri).doesNotContain("_format=json");

 		// assertThat(MY_SERVLET.ourRequestUri,

View File

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

View File

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>7.5.1-SNAPSHOT</version>
+		<version>7.7.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

View File

@@ -39,7 +39,7 @@ public class BundleFetcher {
 		// START SNIPPET: loadAll
 		// Create a context and a client
 		FhirContext ctx = FhirContext.forR4();
-		String serverBase = "http://hapi.fhr.org/baseR4";
+		String serverBase = "http://hapi.fhir.org/baseR4";
 		IGenericClient client = ctx.newRestfulGenericClient(serverBase);

 		// We'll populate this list

View File

@@ -1,3 +1,3 @@
 ---
-release-date: "2023-02-18"
+release-date: "2024-02-18"
 codename: "Apollo"

View File

@@ -1,3 +1,3 @@
 ---
-release-date: "2023-05-18"
+release-date: "2024-05-18"
 codename: "Borealis"

View File

@@ -0,0 +1,3 @@
---
release-date: "2024-09-20"
codename: "Copernicus"

View File

@@ -0,0 +1,3 @@
---
release-date: "2024-09-30"
codename: "Copernicus"

View File

@@ -0,0 +1,4 @@
---
type: fix
issue: 6363
title: "This release updates the org.hl7.fhir core dependency up to 6.3.23, in order to patch [CVE-2024-45294](https://nvd.nist.gov/vuln/detail/CVE-2024-45294)."

View File

@@ -0,0 +1,3 @@
---
release-date: "2024-10-17"
codename: "Copernicus"

View File

@@ -0,0 +1,3 @@
---
release-date: "2024-10-21"
codename: "Copernicus"

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6210
jira: SMILE-8428
title: "Batch instance ID and chunk ID have been added to the logging context so that they can be automatically added to
batch-related messages in the log."

View File

@@ -1,6 +1,6 @@
 ---
 type: fix
-backport: 7.2.3
+backport: 7.2.3,7.4.2
 issue: 6216
 jira: SMILE-8806
 title: "Previously, searches combining the `_text` query parameter (using Lucene/Elasticsearch) with query parameters

View File

@@ -0,0 +1,6 @@
---
type: perf
issue: 6253
title: "A cache has been added to the validation services layer which results
in improved validation performance. Thanks to Max Bureck for the
contribution!"

View File

@@ -0,0 +1,17 @@
---
type: fix
issue: 6285
title: "Updated the Reindex Batch2 job to allow
for an additional step that will check to ensure
that no pending 'reindex' work is needed.
This was done to prevent a bug in which
value set expansion would not return all
the existing CodeSystem Concepts after
a reindex call, due to some of the concepts
being deferred to future job runs.
As such, `$reindex` operations on CodeSystems
will no longer result in incorrect value set
expansion when such an expansion is called
'too soon' after a $reindex operation.
"

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6313
jira: SMILE-8847
title: "The `STORAGE_PARTITION_DELETED` pointcut has been added and will be called upon deleting a partition
using the `$partition-management-delete-partition` operation."

View File

@@ -1,6 +1,7 @@
 ---
 type: fix
 issue: 6317
+backport: 7.4.3
 title: "Previously, defining a unique combo Search Parameter with the DateTime component and submitting multiple
 resources with the same dateTime element (e.g. Observation.effectiveDateTime) resulted in duplicate resource creation.
 This has been fixed."

View File

@@ -0,0 +1,5 @@
---
type: perf
issue: 6323
title: "A synchronization choke point was removed from the model object initialization code, reducing the risk of
multi-thread contention."

View File

@@ -0,0 +1,5 @@
---
type: add
issue: 6325
title: "A new configuration option, `PartitionSettings#setPartitionIdsInPrimaryKeys(boolean)` configures the query engine
to include the partitioning column in search query joins."
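A minimal sketch of enabling the option, assuming access to the JPA `PartitionSettings` bean (how the bean is obtained is deployment-specific):

```java
import ca.uhn.fhir.jpa.model.config.PartitionSettings;

public class PartitionPrimaryKeyConfig {
	public static void configure(PartitionSettings theSettings) {
		// Include the partition column in search query joins
		theSettings.setPartitionIdsInPrimaryKeys(true);
	}
}
```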

View File

@@ -0,0 +1,6 @@
---
type: fix
issue: 6339
jira: SMILE-9044
title: "Fixed a bug in migrations on Postgres when using a non-default schema. If a migration attempted to drop a primary key, the generated SQL would only ever target the `public` schema.
This has been corrected, and the current schema is now used, with `public` as a fallback. Thanks to Adrienne Sox for the contribution!"

View File

@@ -0,0 +1,6 @@
---
type: change
issue: 6341
title: "The CachingValidationSupport cache for concept translations will
now keep up to 500000 translations instead of the previous 5000.
This will be made configurable in a future release."

View File

@@ -0,0 +1,8 @@
---
type: perf
issue: 6345
title: "Date searches using equality could perform badly because the query planner
does not know that our LOW_VALUE columns are always less than or equal to the
corresponding HIGH_VALUE columns (and vice versa).
These queries have been fixed to account for this.
"

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6357
title: Upgrade the Clinical Reasoning module to the latest release of 3.13.0. This update comes with several changes
and feature enhancements to CPG and dQM clinical-reasoning operations. Please review the associated ticket and upgrade.md
for a detailed list of changes.

View File

@@ -0,0 +1,6 @@
---
type: fix
issue: 6359
backport: 7.4.4
title: "After upgrading org.hl7.fhir.core from 6.1.2.2 to 6.3.11, the $validate-code operation stopped returning an
error for invalid codes using remote terminology. This has been fixed."

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6359
backport: 7.4.4
title: "Remote Terminology validation has been enhanced to support output parameter `issues` for the $validate-code
operation."

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 6365
title: "A crash while executing a search with named `_include` parameters on
MSSQL has been fixed. Thanks to Craig McClendon for the pull request!"

View File

@@ -0,0 +1,4 @@
---
type: add
issue: 6366
title: "Add plumbing for combining IConsentServices with different vote tally strategies"

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6370
title: "When using the FHIR `TerserUtil` to merge two resources, if one resource has real data in a particular field,
and the other resource has a `data-absent-reason` extension in the same field, the real data will be given
precedence in the merged resource, and the extension will be ignored."

View File

@@ -0,0 +1,10 @@
---
type: fix
issue: 6372
jira: SMILE-9073
backport: 7.4.5
title: "Searches that combined full-text searching (i.e. `_text` or `_content`)
with other search parameters could fail to return all results if we encountered
1600 matches against the full-text index where none of them match the rest of the query.
This has now been fixed.
"

View File

@@ -0,0 +1,6 @@
---
type: add
issue: 6375
title: "A new experimental JPA setting has been added to JpaStorageSettings which
causes searches for token SearchParameters to include a predicate on the
HASH_IDENTITY column even if it is not needed because other hashes are in use."

View File

@@ -4,7 +4,7 @@
 title: "The version of a few dependencies have been bumped to more recent versions
 (dependent HAPI modules listed in brackets):
 <ul>
-	<li>org.hl7.fhir.core (Base): 6.3.18 -&gt; 6.3.23</li>
+	<li>org.hl7.fhir.core (Base): 6.3.18 -&gt; 6.3.25</li>
 	<li>Bower/Moment.js (hapi-fhir-testpage-overlay): 2.27.0 -&gt; 2.29.4</li>
 	<li>htmlunit (Base): 3.9.0 -&gt; 3.11.0</li>
 	<li>Elasticsearch (Base): 8.11.1 -&gt; 8.14.3</li>

View File

@@ -0,0 +1,25 @@
# Measures and Care Gaps

## TimeZone Header

DQM `$care-gaps` and `$evaluate-measure` will convert the `periodStart` and `periodEnd` parameters
according to a timezone supplied by the client, rather than the server timezone as was previously the case. Clients
can leverage this functionality by passing in the new `Timezone` header (ex: `America/Denver`). If nothing is
supplied, it will default to UTC.
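A hedged client-side sketch of passing the header (the server URL and measure ID are illustrative; the header constant is the new `Constants.HEADER_CLIENT_TIMEZONE`):

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.MeasureReport;
import org.hl7.fhir.r4.model.Parameters;

public class EvaluateMeasureWithTimezone {
	public static void main(String[] args) {
		IGenericClient client = FhirContext.forR4()
				.newRestfulGenericClient("http://example.org/fhir");

		Parameters inParams = new Parameters();
		inParams.addParameter().setName("periodStart").setValue(new DateType("2020-01-01"));
		inParams.addParameter().setName("periodEnd").setValue(new DateType("2020-12-31"));

		// The period is interpreted in Mountain Time rather than server time
		MeasureReport report = client.operation()
				.onInstance(new IdType("Measure", "MyMeasure"))
				.named("$evaluate-measure")
				.withParameters(inParams)
				.withAdditionalHeader(Constants.HEADER_CLIENT_TIMEZONE, "America/Denver")
				.returnResourceType(MeasureReport.class)
				.execute();
		System.out.println(report.getPeriod().getStart());
	}
}
```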
## CareGaps Operation Parameters

### Parameters removed

Certain `$care-gaps` operation parameters have been dropped because they are unused or unlikely to be implemented:

* `topic`
* `practitioner` (now callable via the `subject` parameter)
* `organization`
* `program`

### Parameters added

* `measureIdentifier` is now available to resolve measure resources for evaluation
* `nonDocument` is a new optional parameter that defaults to `false`, which returns the standard `document` bundle for `$care-gaps`.
  If `true`, a summarized subject bundle with only the `DetectedIssue` is returned.

# SDC $populate operation

The `subject` parameter of the `Questionnaire/$populate` operation has been changed to expect a `Reference`, as specified
in the SDC IG.

View File

@@ -0,0 +1,7 @@
---
type: add
issue: 6107
title: "A new extension has been created for use on SearchParameter resources in the JPA server. This extension causes
a SearchParameter to be indexed, but to not be available for use in searches. This can be set when a new SP is created
in order to prevent it from being used before an index has been completed. See
[Introducing Search Parameters on Existing Data](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information."

View File

@@ -0,0 +1,7 @@
---
type: perf
issue: 6224
title: "The JPA server will no longer use a separate thread and database connection
to resolve tag definitions. This should improve performance in some cases, and
resolves compatibility issues for some environments. Thanks to Ibrahim (Trifork A/S)
for the pull request!"

View File

@@ -0,0 +1,10 @@
---
type: fix
issue: 6258
title: "The AuthorizationInterceptor handling for operations has been improved
so that operation rules now directly test the contents of response Bundle
or Parameters objects returned by the operation when configured to require
explicit response authorization. This fixes a regression in 7.4.0 where
operation responses could sometimes be denied even if appropriate
permissions were granted to view resources in a response bundle. Thanks to
Gijsbert van den Brink for reporting the issue with a sample test!"

View File

@@ -0,0 +1,7 @@
---
type: perf
issue: 6395
title: "A new configuration option has been added to `SubscriptionSubmitterConfig` which
causes Subscription resources to be submitted to the processing queue synchronously
instead of asynchronously as all other resources are. This is useful for cases where
subscriptions need to be activated quickly. Thanks to Michal Sevcik for the contribution!"

View File

@@ -0,0 +1,5 @@
---
type: add
issue: 6398
title: "The NPM package search module has been enhanced to support searching by
the package author and the package version attributes."

View File

@@ -0,0 +1,4 @@
---
type: fix
issue: 6407
title: "Corrected IHE BALP AuditEvent generation, so that it records one Audit Event per resource owner. Thanks to Jens Villadsen (@jkiddo) for the contribution!"

View File

@@ -0,0 +1,8 @@
---
- item:
type: "add"
title: "The version of a few dependencies have been bumped to more recent versions
(dependent HAPI modules listed in brackets):
<ul>
<li>org.hl7.fhir.core (Base): 6.3.25 -&gt; 6.4.0</li>
</ul>"

View File

@@ -0,0 +1,3 @@
---
release-date: "2025-02-17"
codename: "TBD"

View File

@ -111,8 +111,50 @@ A request using `periodStart` and `periodEnd` looks like:
```bash ```bash
GET fhir/Measure/<MeasureId>/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31 GET fhir/Measure/<MeasureId>/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31
``` ```
`periodStart` and `periodEnd` support Dates (YYYY, YYYY-MM, or YYYY-MM-DD) and DateTimes (YYYY-MM-DDThh:mm:ss). DateTime formats of YYYY-MM-DDThh:mm:ss+zz are no longer accepted. To pass a timezone into period queries, see the [Headers](#headers) section below.
#### Headers
The behaviour of the `periodStart` and `periodEnd` parameters depends on the value of the `Timezone` header. The measure report will be queried over the period range as interpreted in that timezone, **not the server timezone**.
Accepted values for this header are documented on the [Wikipedia timezones page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)
For example, `Timezone: America/Denver` sets the timezone to Mountain Time.
If the client omits this header, the timezone will default to UTC.
Please consult the table below for examples of various combinations of start, end, and timezone, as well as the resulting queried periods:
| Request timezone | Start | End | Converted Start | Converted End |
|--------------------| ---------------------| --------------------|---------------------------|---------------------------|
| (unset) | (unset) | (unset) | N/A | N/A |
| (unset) | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
| Z | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
| UTC | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
| America/St_Johns | 2020 | 2021 | 2020-01-01T00:00:00-03:30 | 2021-12-31T23:59:59-03:30 |
| America/Toronto | 2020 | 2021 | 2020-01-01T00:00:00-05:00 | 2021-12-31T23:59:59-05:00 |
| America/Denver | 2020 | 2021 | 2020-01-01T00:00:00-07:00 | 2021-12-31T23:59:59-07:00 |
| (unset) | 2022-02 | 2022-08 | 2022-02-01T00:00:00Z | 2022-08-31T23:59:59Z |
| UTC | 2022-02 | 2022-08 | 2022-02-01T00:00:00Z | 2022-08-31T23:59:59Z |
| America/St_Johns | 2022-02 | 2022-08 | 2022-02-01T00:00:00-03:30 | 2022-08-31T23:59:59-02:30 |
| America/Toronto | 2022-02 | 2022-08 | 2022-02-01T00:00:00-05:00 | 2022-08-31T23:59:59-04:00 |
| America/Denver | 2022-02 | 2022-08 | 2022-02-01T00:00:00-07:00 | 2022-08-31T23:59:59-06:00 |
| (unset) | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00Z | 2024-02-26T23:59:59Z |
| UTC | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00Z | 2024-02-26T23:59:59Z |
| America/St_Johns | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-03:30 | 2024-02-26T23:59:59-03:30 |
| America/Toronto | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-05:00 | 2024-02-26T23:59:59-05:00 |
| America/Denver | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-07:00 | 2024-02-26T23:59:59-07:00 |
| (unset) | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00Z | 2024-09-26T23:59:59Z |
| UTC | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00Z | 2024-09-26T23:59:59Z |
| America/St_Johns | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-02:30 | 2024-09-26T23:59:59-02:30 |
| America/Toronto | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-04:00 | 2024-09-26T23:59:59-04:00 |
| America/Denver | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-06:00 | 2024-09-26T23:59:59-06:00 |
| (unset) | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
| Z | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
| UTC | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
| America/St_Johns | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-02:30 | 2024-09-26T11:59:59-02:30 |
| America/Toronto | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-04:00 | 2024-09-26T11:59:59-04:00 |
| America/Denver | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
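For example, to evaluate a measure over two calendar days in Mountain Time, a client would send the request below (using the `<MeasureId>` placeholder from the earlier example) with the header `Timezone: America/Denver`, yielding the converted period 2024-02-25T00:00:00-07:00 to 2024-02-26T23:59:59-07:00 from the table above:
```bash
GET fhir/Measure/<MeasureId>/$evaluate-measure?periodStart=2024-02-25&periodEnd=2024-02-26
```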
#### Report Types #### Report Types

View File

@ -19,6 +19,7 @@ HAPI implements the following operations for PlanDefinitions:
* [$apply](/docs/clinical_reasoning/plan_definitions.html#apply) * [$apply](/docs/clinical_reasoning/plan_definitions.html#apply)
* [$package](/docs/clinical_reasoning/plan_definitions.html#package) * [$package](/docs/clinical_reasoning/plan_definitions.html#package)
* [$data-requirements](/docs/clinical_reasoning/plan_definitions.html#datarequirements)
## Apply ## Apply
@ -40,32 +41,36 @@ GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply
The following parameters are supported for the `PlanDefinition/$apply` and `PlanDefinition/$r5.apply` operation: The following parameters are supported for the `PlanDefinition/$apply` and `PlanDefinition/$r5.apply` operation:
| Parameter | Type | Description | | Parameter | Type | Description |
|---------------------|---------------------------|-------------| |-------------------------|---------------------------|-------------|
| planDefinition | PlanDefinition | The plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. | | planDefinition | PlanDefinition | The plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. |
| canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. | | canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. |
| url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. | | url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
| version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. | | version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
| subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. | | subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. |
| encounter | string(reference) | The encounter in context, if any. | | encounter | string(reference) | The encounter in context, if any. |
| practitioner | string(reference) | The practitioner applying the plan definition. | | practitioner | string(reference) | The practitioner applying the plan definition. |
| organization | string(reference) | The organization applying the plan definition. | | organization | string(reference) | The organization applying the plan definition. |
| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) | | userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) |
| userLanguage | CodeableConcept | Preferred language of the person using the system | | userLanguage | CodeableConcept | Preferred language of the person using the system |
| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. | | userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. |
| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). | | setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). |
| settingContext | CodeableConcept | Additional detail about the setting of the request, if any | | settingContext | CodeableConcept | Additional detail about the setting of the request, if any |
| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. | | parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). | | useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). |
| data | Bundle | Data to be made available to the PlanDefinition evaluation. | | data | Bundle | Data to be made available to the PlanDefinition evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. | | prefetchData | | Data to be made available to the PlanDefinition evaluation, organized as prefetch response bundles. Each prefetchData parameter specifies either the name of the prefetchKey it is satisfying, a DataRequirement describing the prefetch, or both. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. | | prefetchData.key | string | The key of the prefetch item. This typically corresponds to the name of a parameter in a library, or the name of a prefetch item in a CDS Hooks discovery response. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. | | prefetchData.descriptor | DataRequirement | A DataRequirement describing the content of the prefetch item. |
| prefetchData.data | Bundle | The prefetch data as a Bundle. If the prefetchData has no data part, it indicates there is no data associated with this prefetch item. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |
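As a sketch (the canonical URL and patient ID below are hypothetical placeholders), a type-level invocation that supplies `url` and `subject` might look like:
```bash
GET http://your-server-base/fhir/PlanDefinition/$apply?url=http://example.org/PlanDefinition/opioidcds-10-patient-view&subject=Patient/123
```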
## Package ## Package
The `PlanDefinition/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for PlanDefinition will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). The `PlanDefinition/$package` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-package.html) for PlanDefinition will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://hl7.org/fhir/uv/crmi/index.html) guidance for [packaging artifacts](https://hl7.org/fhir/uv/crmi/packaging.html).
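A minimal instance-level request might look like the following sketch (resource ID hypothetical):
```bash
GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$package
```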
### Parameters ### Parameters
@ -80,6 +85,22 @@ The following parameters are supported for the `PlanDefinition/$package` operati
| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. | | usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. |
## DataRequirements
The `PlanDefinition/$data-requirements` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-data-requirements.html) for PlanDefinition will generate a Library of type `module-definition` that returns the computed effective requirements of the artifact.
### Parameters
The following parameters are supported for the `PlanDefinition/$data-requirements` operation:
| Parameter | Type | Description |
|-----------|-----------|----------------------------------------------------------------------------------------------------------------|
| id | string | The logical id of the canonical or artifact resource to analyze. |
| canonical | canonical | A canonical url (optionally version specific) to a canonical resource. |
| url | uri | A canonical or artifact reference to a canonical resource. This is exclusive with the canonical parameter. |
| version | string | The version of the canonical or artifact resource to analyze. This is exclusive with the canonical parameter. |
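A minimal sketch of a type-level request, assuming the `id` parameter identifies an existing PlanDefinition:
```bash
GET http://your-server-base/fhir/PlanDefinition/$data-requirements?id=opioidcds-10-patient-view
```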
## Example PlanDefinition ## Example PlanDefinition
```json ```json

View File

@ -16,7 +16,7 @@ HAPI implements the following operations for Questionnaires and QuestionnaireRes
* [$populate](/docs/clinical_reasoning/questionnaires.html#populate) * [$populate](/docs/clinical_reasoning/questionnaires.html#populate)
* [$extract](/docs/clinical_reasoning/questionnaires.html#extract) * [$extract](/docs/clinical_reasoning/questionnaires.html#extract)
* [$package](/docs/clinical_reasoning/questionnaires.html#package) * [$package](/docs/clinical_reasoning/questionnaires.html#package)
* [$data-requirements](/docs/clinical_reasoning/questionnaires.html#datarequirements)
## Questionnaire ## Questionnaire
@ -26,65 +26,69 @@ The `StructureDefinition/$questionnaire` [operation]() generates a [Questionnair
The following parameters are supported for the `StructureDefinition/$questionnaire` operation: The following parameters are supported for the `StructureDefinition/$questionnaire` operation:
| Parameter | Type | Description | | Parameter | Type | Description |
|-----------|------|-------------| |---------------------|----------------------|-------------|
| profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. | | profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. |
| canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). | | canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). |
| url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. | | url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
| version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. | | version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
| supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. | | supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. |
| requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. | | requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. |
| subject | string | The subject(s) that is/are the target of the Questionnaire. | | contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the StructureDefinition. | | terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. |
| data | Bundle | Data to be made available during CQL evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the StructureDefinition. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. |
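For illustration (the profile URL is a hypothetical placeholder), a type-level request might look like:
```bash
GET http://your-server-base/fhir/StructureDefinition/$questionnaire?url=http://example.org/StructureDefinition/my-profile&supportedOnly=true
```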
## Populate ## Populate
The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire. The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.
This implementation only allows for [Expression-based](https://hl7.org/fhir/uv/sdc/populate.html#expression-based-population) population.
Additional parameters have been added to support CQL evaluation.
### Parameters ### Parameters
The following parameters are supported for the `Questionnaire/$populate` operation: The following parameters are supported for the `Questionnaire/$populate` operation:
| Parameter | Type | Description | | Parameter | Type | Description |
|-----------|------|-------------| |---------------------|---------------|-------------|
| questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. | | questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. |
| canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). | | canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). |
| url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. | | url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
| version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. | | version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
| subject | string | The subject(s) that is/are the target of the Questionnaire. | | subject | Reference | The resource that is to be the QuestionnaireResponse.subject. The QuestionnaireResponse instance will reference the provided subject. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. | | context | | Resources containing information to be used to help populate the QuestionnaireResponse. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. | | context.name | string | The name of the launchContext or root Questionnaire variable the passed content should be used as for population purposes. The name SHALL correspond to a launchContext or variable declared at the root of the Questionnaire. |
| data | Bundle | Data to be made available during CQL evaluation. | | context.reference | Reference | The actual resource (or resources) to use as the value of the launchContext or variable. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. | | local | boolean | Whether the server should use what resources and other knowledge it has about the referenced subject when pre-populating answers to questions. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. | | launchContext | Extension | The [Questionnaire Launch Context](https://hl7.org/fhir/uv/sdc/StructureDefinition-sdc-questionnaire-launchContext.html) extension containing Resources that provide context for form processing logic (pre-population) when creating/displaying/editing a QuestionnaireResponse. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. | | parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. |
| data | Bundle | Data to be made available during CQL evaluation. |
| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. |
| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. |
| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. |
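As an illustrative sketch (resource IDs and the launchContext name are hypothetical), an instance-level POST invocation could carry the `subject` and `context` parameters in a Parameters body:
```json
{
  "resourceType": "Parameters",
  "parameter": [
    { "name": "subject", "valueReference": { "reference": "Patient/123" } },
    {
      "name": "context",
      "part": [
        { "name": "name", "valueString": "patient" },
        { "name": "reference", "valueReference": { "reference": "Patient/123" } }
      ]
    }
  ]
}
```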
## Extract ## Extract
The `QuestionnaireResponse/$extract` [operation](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output. The `QuestionnaireResponse/$extract` [operation](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output.
This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction. This implementation allows for both [Observation-based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition-based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.
### Parameters ### Parameters
The following parameters are supported for the `QuestionnaireResponse/$extract` operation: The following parameters are supported for the `QuestionnaireResponse/$extract` operation:
| Parameter | Type | Description | | Parameter | Type | Description |
|-----------|------|-------------| |------------------------|-----------------------|-------------|
| questionnaire-response | QuestionnaireResponse | The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. | | questionnaire-response | QuestionnaireResponse | The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. |
| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. | | questionnaire | Questionnaire | The Questionnaire the QuestionnaireResponse is answering. Used when the server does not have access to the Questionnaire. |
| data | Bundle | Data to be made available during CQL evaluation. | | parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
| useServerData | boolean | Whether to use data from the server performing the evaluation. |
| data | Bundle | Data to be made available during CQL evaluation. |
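A sketch of an invocation (IDs hypothetical): the completed QuestionnaireResponse is sent as the `questionnaire-response` part of a Parameters body:
```bash
POST http://your-server-base/fhir/QuestionnaireResponse/$extract
```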
## Package ## Package
The `Questionnaire/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html). The `Questionnaire/$package` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://hl7.org/fhir/uv/crmi/index.html) guidance for [packaging artifacts](https://hl7.org/fhir/uv/crmi/packaging.html).
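A minimal instance-level request might look like this sketch (resource ID hypothetical):
```bash
GET http://your-server-base/fhir/Questionnaire/my-questionnaire/$package
```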
### Parameters ### Parameters
@ -99,6 +103,22 @@ The following parameters are supported for the `Questionnaire/$package` operatio
| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. | | usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. |
## DataRequirements
The `Questionnaire/$data-requirements` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-data-requirements.html) for Questionnaire will generate a Library of type `module-definition` that returns the computed effective requirements of the artifact.
### Parameters
The following parameters are supported for the `Questionnaire/$data-requirements` operation:
| Parameter | Type | Description |
|-----------|-----------|----------------------------------------------------------------------------------------------------------------|
| id | string | The logical id of the canonical or artifact resource to analyze. |
| canonical | canonical | A canonical url (optionally version specific) to a canonical resource. |
| url | uri | A canonical or artifact reference to a canonical resource. This is exclusive with the canonical parameter. |
| version | string | The version of the canonical or artifact resource to analyze. This is exclusive with the canonical parameter. |
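As with PlanDefinition, a minimal sketch of a request (ID hypothetical):
```bash
GET http://your-server-base/fhir/Questionnaire/$data-requirements?id=my-questionnaire
```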
## Example Questionnaire ## Example Questionnaire
```json ```json

View File

@ -51,6 +51,14 @@ HAPI is built primary using [Apache Maven](http://maven.apache.org/). Even if yo
[INFO] Finished at: 2016-02-27T15:05:35+00:00 [INFO] Finished at: 2016-02-27T15:05:35+00:00
``` ```
# Rebuilding the Database Schema
The database schema is built as part of your Maven build, but if you need to rebuild it later, you can use the following command:
```bash
mvn hapi-tinder:generate-ddl
```
# Troubleshooting # Troubleshooting
If the build fails to execute successfully, try the following: If the build fails to execute successfully, try the following:

View File

@ -94,6 +94,7 @@ page.clinical_reasoning.overview=Clinical Reasoning Overview
page.clinical_reasoning.cql=CQL page.clinical_reasoning.cql=CQL
page.clinical_reasoning.caregaps=Care Gaps page.clinical_reasoning.caregaps=Care Gaps
page.clinical_reasoning.measures=Measures page.clinical_reasoning.measures=Measures
page.clinical_reasoning.activity_definitions=ActivityDefinitions
page.clinical_reasoning.plan_definitions=PlanDefinitions page.clinical_reasoning.plan_definitions=PlanDefinitions
page.clinical_reasoning.questionnaires=Questionnaires page.clinical_reasoning.questionnaires=Questionnaires

View File

@ -238,3 +238,7 @@ In order to improve sorting performance when chained sorts are needed, an [Uplif
# _include and _revinclude order # _include and _revinclude order
By default, all _revincludes will be performed first and then all _includes are performed afterwards. However, if any _revinclude parameters are modified with :iterate (or :recurse for earlier versions of FHIR) then all _include parameters will be evaluated first. By default, all _revincludes will be performed first and then all _includes are performed afterwards. However, if any _revinclude parameters are modified with :iterate (or :recurse for earlier versions of FHIR) then all _include parameters will be evaluated first.
# Custom Search Parameters
HAPI FHIR has the ability to index and use custom search parameters, including parameters which enforce uniqueness, parameters which index combinations of parameters, and parameters which are indexed but not used for searches until they are ready. See [Custom Search Parameters](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information.

View File

@ -70,3 +70,7 @@ If a Job Definition is set to having Gated Execution, then all work chunks for a
### Job Instance Completion ### Job Instance Completion
A Batch Job Maintenance Service runs every minute to monitor the status of all Job Instances and the Job Instance is transitioned to either `COMPLETED`, `ERRORED` or `FAILED` according to the status of all outstanding work chunks for that job instance. If the job instance is still `IN_PROGRESS` this maintenance service also estimates the time remaining to complete the job. A Batch Job Maintenance Service runs every minute to monitor the status of all Job Instances and the Job Instance is transitioned to either `COMPLETED`, `ERRORED` or `FAILED` according to the status of all outstanding work chunks for that job instance. If the job instance is still `IN_PROGRESS` this maintenance service also estimates the time remaining to complete the job.
## Logging
The job instance ID and work chunk ID are both available through the logback MDC and can be accessed using the `%X` specifier in a `logback.xml` file. See [Logging](/docs/appendix/logging.html#logging) for more details about logging in HAPI.
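For example, a `logback.xml` pattern can surface both values via `%X`; the MDC key names below are illustrative assumptions, so consult the HAPI source or logging docs for the exact keys:
```xml
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
  <encoder>
    <!-- %X{...} pulls the named entry from the logback MDC; key names here are assumed -->
    <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level job=%X{jobInstanceId} chunk=%X{chunkId} %logger - %msg%n</pattern>
  </encoder>
</appender>
```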

View File

@ -287,7 +287,7 @@ Dates are a special case, since it is a fairly common scenario to want to match
# Resource Includes (_include) # Resource Includes (_include)
FHIR allows clients to request that specific linked resources be included as contained resources, which means that they will be "embedded" in a special container called "contained" within the parent resource. Using the `_include` FHIR parameter, clients can request that specific linked resources be embedded directly within search results. These included resources will have a search.mode of "include".
HAPI allows you to add a parameter for accepting includes if you wish to support them for specific search methods. HAPI allows you to add a parameter for accepting includes if you wish to support them for specific search methods.
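As a sketch of the server-side declaration (the provider class, search method, and allowed include path are illustrative), HAPI's `@IncludeParam` annotation collects the requested includes:
```java
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.annotation.IncludeParam;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.param.StringParam;
import org.hl7.fhir.r4.model.Patient;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

public class PatientProvider {

   @Search
   public List<Patient> searchWithIncludes(
         @RequiredParam(name = Patient.SP_FAMILY) StringParam theFamily,
         @IncludeParam(allow = {"Patient:organization"}) Set<Include> theIncludes) {
      // theIncludes holds the _include values the client requested;
      // the provider is responsible for loading and returning the linked resources.
      return new ArrayList<>();
   }
}
```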

View File

@ -11,7 +11,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.5.1-SNAPSHOT</version> <version>7.7.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -4,7 +4,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.5.1-SNAPSHOT</version> <version>7.7.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -1474,7 +1474,9 @@ public class GenericJaxRsClientDstu2Test {
.execute(); .execute();
assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", CAPTURE_SERVLET.ourRequestUri); assertThat(CAPTURE_SERVLET.ourRequestUri).isIn(
ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");
// assertThat(MY_SERVLET.ourRequestUri, // assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname"))); // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@ -1511,7 +1513,10 @@ public class GenericJaxRsClientDstu2Test {
assertThat(CAPTURE_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?"); assertThat(CAPTURE_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
assertThat(CAPTURE_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname"); assertThat(CAPTURE_SERVLET.ourRequestUri).satisfiesAnyOf(
s -> assertThat(s).contains("_elements=identifier%2Cname"),
s -> assertThat(s).contains("_elements=name%2Cidentifier")
);
// assertThat(MY_SERVLET.ourRequestUri, // assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname"))); // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));

View File

@ -1546,7 +1546,9 @@ public class GenericJaxRsClientDstu3Test {
.execute(); .execute();
//@formatter:on //@formatter:on
assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri); assertThat(MY_SERVLET.ourRequestUri).isIn(
ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");
// assertThat(MY_SERVLET.ourRequestUri, // assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname"))); // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@ -1583,7 +1585,10 @@ public class GenericJaxRsClientDstu3Test {
//@formatter:on //@formatter:on
assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?"); assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname"); assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
s -> assertThat(s).contains("_elements=identifier%2Cname"),
s -> assertThat(s).contains("_elements=name%2Cidentifier")
);
// assertThat(MY_SERVLET.ourRequestUri, // assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname"))); // either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.5.1-SNAPSHOT</version> <version>7.7.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId> <artifactId>hapi-deployable-pom</artifactId>
<version>7.5.1-SNAPSHOT</version> <version>7.7.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath> <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent> </parent>
@ -414,7 +414,6 @@
<plugin> <plugin>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-plugin</artifactId> <artifactId>hapi-tinder-plugin</artifactId>
<version>${project.version}</version>
<executions> <executions>
<execution> <execution>
<id>build_dstu2</id> <id>build_dstu2</id>
@ -525,7 +524,6 @@
<plugin> <plugin>
<groupId>ca.uhn.hapi.fhir</groupId> <groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-plugin</artifactId> <artifactId>hapi-tinder-plugin</artifactId>
<version>${project.version}</version>
<executions> <executions>
<execution> <execution>
<goals> <goals>
@ -534,6 +532,7 @@
</execution> </execution>
</executions> </executions>
<configuration> <configuration>
<skip>false</skip>
<packageNames> <packageNames>
<packageName>ca.uhn.fhir.jpa.entity</packageName> <packageName>ca.uhn.fhir.jpa.entity</packageName>
<packageName>ca.uhn.fhir.jpa.model.entity</packageName> <packageName>ca.uhn.fhir.jpa.model.entity</packageName>

View File

@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.batch2;
import ca.uhn.fhir.batch2.api.IJobPersistence; import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.api.JobOperationResultJson; import ca.uhn.fhir.batch2.api.JobOperationResultJson;
import ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO;
import ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO;
import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest; import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest;
import ca.uhn.fhir.batch2.model.JobInstance; import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum; import ca.uhn.fhir.batch2.model.StatusEnum;
@ -258,6 +260,22 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
.execute(() -> myJobInstanceRepository.findById(theInstanceId).map(this::toInstance)); .execute(() -> myJobInstanceRepository.findById(theInstanceId).map(this::toInstance));
} }
@Nonnull
@Override
public List<BatchWorkChunkStatusDTO> fetchWorkChunkStatusForInstance(String theInstanceId) {
return myTransactionService
.withSystemRequestOnDefaultPartition()
.execute(() -> myWorkChunkRepository.fetchWorkChunkStatusForInstance(theInstanceId));
}
@Nonnull
@Override
public BatchInstanceStatusDTO fetchBatchInstanceStatus(String theInstanceId) {
return myTransactionService
.withSystemRequestOnDefaultPartition()
.execute(() -> myJobInstanceRepository.fetchBatchInstanceStatus(theInstanceId));
}
@Override @Override
@Transactional(propagation = Propagation.REQUIRES_NEW) @Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchInstances(FetchJobInstancesRequest theRequest, int thePage, int theBatchSize) { public List<JobInstance> fetchInstances(FetchJobInstancesRequest theRequest, int thePage, int theBatchSize) {

View File

@ -625,7 +625,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
resourceToCheck = "Patient"; resourceToCheck = "Patient";
activeSearchParamName = "organization"; activeSearchParamName = "organization";
} }
return mySearchParamRegistry.getActiveSearchParam(resourceToCheck, activeSearchParamName); return mySearchParamRegistry.getActiveSearchParam(
resourceToCheck, activeSearchParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
} }
/** /**

View File

@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl; import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc; import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl; import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.CacheTagDefinitionDao;
import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider; import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
import ca.uhn.fhir.jpa.dao.HistoryBuilder; import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory; import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
@ -56,6 +57,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor; import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao; import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao; import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation; import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService; import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
@ -377,17 +379,17 @@ public class JpaConfig {
@Bean @Bean
public TaskScheduler taskScheduler() { public TaskScheduler taskScheduler() {
ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(); ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
retVal.setConcurrentExecutor(scheduledExecutorService().getObject()); scheduledExecutorService().getObject(),
retVal.setScheduledExecutor(scheduledExecutorService().getObject()); scheduledExecutorService().getObject());
return retVal; return retVal;
} }
@Bean(name = TASK_EXECUTOR_NAME) @Bean(name = TASK_EXECUTOR_NAME)
public AsyncTaskExecutor taskExecutor() { public AsyncTaskExecutor taskExecutor() {
ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(); ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
retVal.setConcurrentExecutor(scheduledExecutorService().getObject()); scheduledExecutorService().getObject(),
retVal.setScheduledExecutor(scheduledExecutorService().getObject()); scheduledExecutorService().getObject());
return retVal; return retVal;
} }
@ -893,4 +895,10 @@ public class JpaConfig {
FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) { FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect()); return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
} }
@Bean
public CacheTagDefinitionDao tagDefinitionDao(
ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
return new CacheTagDefinitionDao(tagDefinitionDao, memoryCacheService);
}
} }

View File

@ -75,7 +75,6 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc; import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount; import ca.uhn.fhir.jpa.util.AddRemoveCount;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker; import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
@ -89,7 +88,6 @@ import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum; import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails; import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -107,14 +105,8 @@ import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import jakarta.annotation.PostConstruct; import jakarta.annotation.PostConstruct;
import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManager;
import jakarta.persistence.NoResultException;
import jakarta.persistence.PersistenceContext; import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType; import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.Validate;
@ -136,19 +128,11 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.IdentityHashMap; import java.util.IdentityHashMap;
import java.util.List; import java.util.List;
@ -158,7 +142,6 @@ import java.util.stream.Collectors;
import javax.xml.stream.events.Characters; import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent; import javax.xml.stream.events.XMLEvent;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull; import static java.util.Objects.nonNull;
import static org.apache.commons.collections4.CollectionUtils.isEqualCollection; import static org.apache.commons.collections4.CollectionUtils.isEqualCollection;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
@ -182,8 +165,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
public static final long INDEX_STATUS_INDEXED = 1L; public static final long INDEX_STATUS_INDEXED = 1L;
public static final long INDEX_STATUS_INDEXING_FAILED = 2L; public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile"; public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
// total attempts to do a tag transaction
private static final int TOTAL_TAG_READ_ATTEMPTS = 10;
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class); private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
private static boolean ourValidationDisabledForUnitTest; private static boolean ourValidationDisabledForUnitTest;
private static boolean ourDisableIncrementOnUpdateForUnitTest = false; private static boolean ourDisableIncrementOnUpdateForUnitTest = false;
@ -248,17 +229,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Autowired @Autowired
private IPartitionLookupSvc myPartitionLookupSvc; private IPartitionLookupSvc myPartitionLookupSvc;
@Autowired
private MemoryCacheService myMemoryCacheService;
@Autowired(required = false) @Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc; private IFulltextSearchSvc myFulltextSearchSvc;
@Autowired @Autowired
private PlatformTransactionManager myTransactionManager; protected ResourceHistoryCalculator myResourceHistoryCalculator;
@Autowired @Autowired
protected ResourceHistoryCalculator myResourceHistoryCalculator; protected CacheTagDefinitionDao cacheTagDefinitionDao;
protected final CodingSpy myCodingSpy = new CodingSpy(); protected final CodingSpy myCodingSpy = new CodingSpy();
@ -307,7 +285,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource); TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
if (tagList != null) { if (tagList != null) {
for (Tag next : tagList) { for (Tag next : tagList) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, theTransactionDetails,
TagTypeEnum.TAG, TagTypeEnum.TAG,
next.getScheme(), next.getScheme(),
@ -326,7 +304,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<BaseCodingDt> securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource); List<BaseCodingDt> securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource);
if (securityLabels != null) { if (securityLabels != null) {
for (BaseCodingDt next : securityLabels) { for (BaseCodingDt next : securityLabels) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, theTransactionDetails,
TagTypeEnum.SECURITY_LABEL, TagTypeEnum.SECURITY_LABEL,
next.getSystemElement().getValue(), next.getSystemElement().getValue(),
@ -345,7 +323,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<IdDt> profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource); List<IdDt> profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
if (profiles != null) { if (profiles != null) {
for (IIdType next : profiles) { for (IIdType next : profiles) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
if (def != null) { if (def != null) {
ResourceTag tag = theEntity.addTag(def); ResourceTag tag = theEntity.addTag(def);
@ -364,7 +342,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<? extends IBaseCoding> tagList = theResource.getMeta().getTag(); List<? extends IBaseCoding> tagList = theResource.getMeta().getTag();
if (tagList != null) { if (tagList != null) {
for (IBaseCoding next : tagList) { for (IBaseCoding next : tagList) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, theTransactionDetails,
TagTypeEnum.TAG, TagTypeEnum.TAG,
next.getSystem(), next.getSystem(),
@ -383,7 +361,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<? extends IBaseCoding> securityLabels = theResource.getMeta().getSecurity(); List<? extends IBaseCoding> securityLabels = theResource.getMeta().getSecurity();
if (securityLabels != null) { if (securityLabels != null) {
for (IBaseCoding next : securityLabels) { for (IBaseCoding next : securityLabels) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, theTransactionDetails,
TagTypeEnum.SECURITY_LABEL, TagTypeEnum.SECURITY_LABEL,
next.getSystem(), next.getSystem(),
@ -402,7 +380,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
List<? extends IPrimitiveType<String>> profiles = theResource.getMeta().getProfile(); List<? extends IPrimitiveType<String>> profiles = theResource.getMeta().getProfile();
if (profiles != null) { if (profiles != null) {
for (IPrimitiveType<String> next : profiles) { for (IPrimitiveType<String> next : profiles) {
TagDefinition def = getTagOrNull( TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null); theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
if (def != null) { if (def != null) {
ResourceTag tag = theEntity.addTag(def); ResourceTag tag = theEntity.addTag(def);
@ -422,7 +400,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (!def.isStandardType()) { if (!def.isStandardType()) {
String profile = def.getResourceProfile(""); String profile = def.getResourceProfile("");
if (isNotBlank(profile)) { if (isNotBlank(profile)) {
TagDefinition profileDef = getTagOrNull( TagDefinition profileDef = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null); theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null);
ResourceTag tag = theEntity.addTag(profileDef); ResourceTag tag = theEntity.addTag(profileDef);
@ -456,164 +434,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myContext = theContext;
}
/**
* <code>null</code> will only be returned if the scheme and tag are both blank
*/
protected TagDefinition getTagOrNull(
TransactionDetails theTransactionDetails,
TagTypeEnum theTagType,
String theScheme,
String theTerm,
String theLabel,
String theVersion,
Boolean theUserSelected) {
if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) {
return null;
}
MemoryCacheService.TagDefinitionCacheKey key =
toTagDefinitionMemoryCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected);
TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
if (retVal == null) {
HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions =
theTransactionDetails.getOrCreateUserData(
HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new);
retVal = resolvedTagDefinitions.get(key);
if (retVal == null) {
// actual DB hit(s) happen here
retVal = getOrCreateTag(theTagType, theScheme, theTerm, theLabel, theVersion, theUserSelected);
TransactionSynchronization sync = new AddTagDefinitionToCacheAfterCommitSynchronization(key, retVal);
TransactionSynchronizationManager.registerSynchronization(sync);
resolvedTagDefinitions.put(key, retVal);
}
}
return retVal;
}
/**
* Gets the tag defined by the supplied values, or saves it if it does not
* exist.
* <p>
* Throws an InternalErrorException if the tag could not be read or created
* after the maximum number of attempts.
*/
private TagDefinition getOrCreateTag(
TagTypeEnum theTagType,
String theScheme,
String theTerm,
String theLabel,
String theVersion,
Boolean theUserSelected) {
TypedQuery<TagDefinition> q = buildTagQuery(theTagType, theScheme, theTerm, theVersion, theUserSelected);
q.setMaxResults(1);
TransactionTemplate template = new TransactionTemplate(myTransactionManager);
template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
// this transaction will attempt to get or create the tag,
// retrying on any failure up to TOTAL_TAG_READ_ATTEMPTS times;
// if it still fails after that, an exception is thrown
TagDefinition retVal;
int count = 0;
HashSet<Throwable> throwables = new HashSet<>();
do {
try {
retVal = template.execute(new TransactionCallback<TagDefinition>() {
// do the actual DB call(s) to read and/or write the values
private TagDefinition readOrCreate() {
TagDefinition val;
try {
val = q.getSingleResult();
} catch (NoResultException e) {
val = new TagDefinition(theTagType, theScheme, theTerm, theLabel);
val.setVersion(theVersion);
val.setUserSelected(theUserSelected);
myEntityManager.persist(val);
}
return val;
}
@Override
public TagDefinition doInTransaction(TransactionStatus status) {
TagDefinition tag = null;
try {
tag = readOrCreate();
} catch (Exception ex) {
// log any exceptions - just in case
// they may be signs of things to come...
ourLog.warn(
"Tag read/write failed: "
+ ex.getMessage() + ". "
+ "This is not a failure on its own, "
+ "but could be useful information in the result of an actual failure.",
ex);
throwables.add(ex);
}
return tag;
}
});
} catch (Exception ex) {
// transaction template can fail if connections to db are exhausted and/or timeout
ourLog.warn(
"Transaction failed with: {}. Transaction will rollback and be reattempted.", ex.getMessage());
retVal = null;
}
count++;
} while (retVal == null && count < TOTAL_TAG_READ_ATTEMPTS);
if (retVal == null) {
// if tag is still null,
// something bad must be happening
// - throw
String msg = throwables.stream().map(Throwable::getMessage).collect(Collectors.joining(", "));
throw new InternalErrorException(Msg.code(2023)
+ "Tag get/create failed after "
+ TOTAL_TAG_READ_ATTEMPTS
+ " attempts with error(s): "
+ msg);
}
return retVal;
}
private TypedQuery<TagDefinition> buildTagQuery(
TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<TagDefinition> cq = builder.createQuery(TagDefinition.class);
Root<TagDefinition> from = cq.from(TagDefinition.class);
List<Predicate> predicates = new ArrayList<>();
predicates.add(builder.and(
builder.equal(from.get("myTagType"), theTagType), builder.equal(from.get("myCode"), theTerm)));
predicates.add(
isBlank(theScheme)
? builder.isNull(from.get("mySystem"))
: builder.equal(from.get("mySystem"), theScheme));
predicates.add(
isBlank(theVersion)
? builder.isNull(from.get("myVersion"))
: builder.equal(from.get("myVersion"), theVersion));
predicates.add(
isNull(theUserSelected)
? builder.isNull(from.get("myUserSelected"))
: builder.equal(from.get("myUserSelected"), theUserSelected));
cq.where(predicates.toArray(new Predicate[0]));
return myEntityManager.createQuery(cq);
}
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
if (theResourceId == null || theResourceId.getVersionIdPart() == null) {
theSavedEntity.initializeVersion();
@ -933,7 +753,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
@Override
@CoverageIgnore
public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) {
-throw new NotImplementedException(Msg.code(927) + "");
+throw new NotImplementedException(Msg.code(927));
}
/**
@ -1839,9 +1659,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}
@PostConstruct
-public void start() {
-// nothing yet
-}
+public void start() {}
@VisibleForTesting
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
@ -1880,30 +1698,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
myJpaStorageResourceParser = theJpaStorageResourceParser;
}
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
private final TagDefinition myTagDefinition;
private final MemoryCacheService.TagDefinitionCacheKey myKey;
public AddTagDefinitionToCacheAfterCommitSynchronization(
MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
myTagDefinition = theTagDefinition;
myKey = theKey;
}
@Override
public void afterCommit() {
myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
}
}
@Nonnull
public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
return new MemoryCacheService.TagDefinitionCacheKey(
theTagType, theScheme, theTerm, theVersion, theUserSelected);
}
@SuppressWarnings("unchecked")
public static String parseContentTextIntoWords(FhirContext theContext, IBaseResource theResource) {

View File

@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirVersionEnum;
@ -158,6 +157,7 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX;
import static java.util.Objects.isNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@ -1043,7 +1043,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (!entityHasTag) {
theEntity.setHasTags(true);
-TagDefinition def = getTagOrNull(
+TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
nextDef.getTagType(),
nextDef.getSystem(),
@ -1315,7 +1315,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
myJobPartitionProvider.getPartitionedUrls(theRequestDetails, urls).forEach(params::addPartitionedUrl);
JobInstanceStartRequest request = new JobInstanceStartRequest();
-request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
+request.setJobDefinitionId(JOB_REINDEX);
request.setParameters(params);
myJobCoordinator.startInstance(theRequestDetails, request);

View File

@ -0,0 +1,132 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import jakarta.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.util.HashMap;
import java.util.List;
import static org.apache.commons.lang3.StringUtils.isBlank;
@Repository
public class CacheTagDefinitionDao {
private static final Logger ourLog = LoggerFactory.getLogger(CacheTagDefinitionDao.class);
private final ITagDefinitionDao tagDefinitionDao;
private final MemoryCacheService memoryCacheService;
public CacheTagDefinitionDao(ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
this.tagDefinitionDao = tagDefinitionDao;
this.memoryCacheService = memoryCacheService;
}
/**
* Returns a TagDefinition or null if the scheme, term, and label are all blank.
*/
protected TagDefinition getTagOrNull(
TransactionDetails transactionDetails,
TagTypeEnum tagType,
String scheme,
String term,
String label,
String version,
Boolean userSelected) {
if (isBlank(scheme) && isBlank(term) && isBlank(label)) {
return null;
}
MemoryCacheService.TagDefinitionCacheKey key =
toTagDefinitionMemoryCacheKey(tagType, scheme, term, version, userSelected);
TagDefinition tagDefinition = memoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
if (tagDefinition == null) {
HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions =
transactionDetails.getOrCreateUserData("resolvedTagDefinitions", HashMap::new);
tagDefinition = resolvedTagDefinitions.get(key);
if (tagDefinition == null) {
tagDefinition = getOrCreateTag(tagType, scheme, term, label, version, userSelected);
TransactionSynchronization sync =
new AddTagDefinitionToCacheAfterCommitSynchronization(key, tagDefinition);
TransactionSynchronizationManager.registerSynchronization(sync);
resolvedTagDefinitions.put(key, tagDefinition);
}
}
return tagDefinition;
}
/**
* Gets or creates a TagDefinition entity.
*/
private TagDefinition getOrCreateTag(
TagTypeEnum tagType, String scheme, String term, String label, String version, Boolean userSelected) {
List<TagDefinition> result = tagDefinitionDao.findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
tagType, scheme, term, version, userSelected, Pageable.ofSize(1));
if (!result.isEmpty()) {
return result.get(0);
} else {
// Create a new TagDefinition if no result is found
TagDefinition newTag = new TagDefinition(tagType, scheme, term, label);
newTag.setVersion(version);
newTag.setUserSelected(userSelected);
return tagDefinitionDao.save(newTag);
}
}
@Nonnull
private static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
TagTypeEnum tagType, String scheme, String term, String version, Boolean userSelected) {
return new MemoryCacheService.TagDefinitionCacheKey(tagType, scheme, term, version, userSelected);
}
private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
private final TagDefinition tagDefinition;
private final MemoryCacheService.TagDefinitionCacheKey key;
public AddTagDefinitionToCacheAfterCommitSynchronization(
MemoryCacheService.TagDefinitionCacheKey key, TagDefinition tagDefinition) {
this.tagDefinition = tagDefinition;
this.key = key;
}
@Override
public void afterCommit() {
memoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, key, tagDefinition);
}
}
}
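
For orientation, a minimal sketch of how a same-package collaborator might exercise the new DAO. The caller class, its wiring, and the coding values are illustrative assumptions, not part of this change; getTagOrNull is protected, so a real caller lives in this package, as BaseHapiFhirDao does.

package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;

// Hypothetical caller; assumes a Spring-managed CacheTagDefinitionDao and an
// active transaction, as in BaseHapiFhirDao.
public class TagLookupExample {

    private final CacheTagDefinitionDao myCacheTagDefinitionDao;

    public TagLookupExample(CacheTagDefinitionDao theCacheTagDefinitionDao) {
        myCacheTagDefinitionDao = theCacheTagDefinitionDao;
    }

    public TagDefinition resolveSecurityLabel(TransactionDetails theTransactionDetails) {
        // Returns null only when scheme, term and label are all blank; otherwise the
        // lookup order is: memory cache, transaction-local map, then the database.
        return myCacheTagDefinitionDao.getTagOrNull(
                theTransactionDetails,
                TagTypeEnum.SECURITY_LABEL,
                "http://terminology.hl7.org/CodeSystem/v3-Confidentiality",
                "R",
                "restricted",
                null,
                null);
    }
}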

View File

@ -137,7 +137,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
public ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
String resourceType = myFhirContext.getResourceType(theResource);
-ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
+ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
+resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
return extractor.extract(theResource, theNewParams);

View File

@ -27,7 +27,11 @@ import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult;
import ca.uhn.fhir.context.support.LookupCodeRequest;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.api.model.ReindexJobStatus;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -176,6 +180,47 @@ public class JpaResourceDaoCodeSystem<T extends IBaseResource> extends BaseHapiF
myTermDeferredStorageSvc.deleteCodeSystemForResource(theEntityToDelete);
}
/**
* If there are more code systems to process
* than {@link JpaStorageSettings#getDeferIndexingForCodesystemsOfSize()},
* then these codes will have their processing deferred (for a later time).
*
* This can result in future reindex steps *skipping* these code systems (if
* they're still deferred), which in turn produces incorrect expansions.
*
* So we override the reindex method for CodeSystems specifically to
* force reindex batch jobs to wait until all code systems are processed before
* moving on.
*/
@SuppressWarnings("rawtypes")
@Override
public ReindexOutcome reindex(
IResourcePersistentId thePid,
ReindexParameters theReindexParameters,
RequestDetails theRequest,
TransactionDetails theTransactionDetails) {
ReindexOutcome outcome = super.reindex(thePid, theReindexParameters, theRequest, theTransactionDetails);
if (outcome.getWarnings().isEmpty()) {
outcome.setHasPendingWork(true);
}
return outcome;
}
@Override
public ReindexJobStatus getReindexJobStatus() {
boolean isQueueEmpty = myTermDeferredStorageSvc.isStorageQueueEmpty(true);
ReindexJobStatus status = new ReindexJobStatus();
status.setHasReindexWorkPending(!isQueueEmpty);
if (status.isHasReindexWorkPending()) {
// force a run
myTermDeferredStorageSvc.saveDeferred();
}
return status;
}
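
The diff does not show the batch framework's consumer side, so as a hedged sketch only, pending work might be drained by polling the status hook above; the wrapper class and the sleep interval are assumptions.

package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.api.model.ReindexJobStatus;

// Hypothetical polling helper around getReindexJobStatus(); each call also
// triggers saveDeferred(), nudging the deferred-storage queue toward empty.
public class ReindexWaitExample {

    private final JpaResourceDaoCodeSystem<?> myCodeSystemDao;

    public ReindexWaitExample(JpaResourceDaoCodeSystem<?> theCodeSystemDao) {
        myCodeSystemDao = theCodeSystemDao;
    }

    public void waitForDeferredCodeSystems() throws InterruptedException {
        ReindexJobStatus status = myCodeSystemDao.getReindexJobStatus();
        while (status.isHasReindexWorkPending()) {
            Thread.sleep(1000L); // back off while deferred code systems drain
            status = myCodeSystemDao.getReindexJobStatus();
        }
    }
}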
@Override
public ResourceTable updateEntity(
RequestDetails theRequest,

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import org.springframework.data.domain.Pageable;
@ -91,4 +92,8 @@ public interface IBatch2JobInstanceRepository
@Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId") @Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId")
List<Batch2JobInstanceEntity> findInstancesByJobDefinitionId( List<Batch2JobInstanceEntity> findInstancesByJobDefinitionId(
@Param("jobDefinitionId") String theJobDefinitionId, Pageable thePageRequest); @Param("jobDefinitionId") String theJobDefinitionId, Pageable thePageRequest);
@Query(
"SELECT new ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO(e.myId, e.myStatus, e.myStartTime, e.myEndTime) FROM Batch2JobInstanceEntity e WHERE e.myId = :id")
BatchInstanceStatusDTO fetchBatchInstanceStatus(@Param("id") String theInstanceId);
}
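
As a usage sketch for the new projection query (the consumer class and its wiring are assumptions, not shown in this change):

package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO;

// Hypothetical consumer of fetchBatchInstanceStatus(); the repository is
// assumed to be Spring-injected and the instance id illustrative.
public class BatchInstanceStatusExample {

    private final IBatch2JobInstanceRepository myRepository;

    public BatchInstanceStatusExample(IBatch2JobInstanceRepository theRepository) {
        myRepository = theRepository;
    }

    public BatchInstanceStatusDTO readStatus(String theInstanceId) {
        // One row per instance: id, status, start time and end time,
        // materialized by the JPQL constructor expression above.
        return myRepository.fetchBatchInstanceStatus(theInstanceId);
    }
}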

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO;
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import org.springframework.data.domain.Pageable;
@ -147,4 +148,8 @@ public interface IBatch2WorkChunkRepository
@Param("instanceId") String theInstanceId, @Param("instanceId") String theInstanceId,
@Param("stepId") String theStepId, @Param("stepId") String theStepId,
@Param("status") WorkChunkStatusEnum theStatus); @Param("status") WorkChunkStatusEnum theStatus);
@Query(
"SELECT new ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO(e.myTargetStepId, e.myStatus, min(e.myStartTime), max(e.myEndTime), avg(e.myEndTime - e.myStartTime), count(*)) FROM Batch2WorkChunkEntity e WHERE e.myInstanceId=:instanceId GROUP BY e.myTargetStepId, e.myStatus")
List<BatchWorkChunkStatusDTO> fetchWorkChunkStatusForInstance(@Param("instanceId") String theInstanceId);
}
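
A companion sketch for the per-step roll-up above; again the caller is a hypothetical name, not part of this diff:

package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO;
import java.util.List;

// Hypothetical consumer: the query returns one DTO per (step, status) pair,
// with min/max start-end times and the average duration computed in SQL.
public class WorkChunkStatusExample {

    private final IBatch2WorkChunkRepository myRepository;

    public WorkChunkStatusExample(IBatch2WorkChunkRepository theRepository) {
        myRepository = theRepository;
    }

    public List<BatchWorkChunkStatusDTO> readStepBreakdown(String theInstanceId) {
        return myRepository.fetchWorkChunkStatusForInstance(theInstanceId);
    }
}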

View File

@ -20,8 +20,25 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
public interface ITagDefinitionDao extends JpaRepository<TagDefinition, Long>, IHapiFhirJpaRepository {
-// nothing
+@Query("SELECT t FROM TagDefinition t WHERE " + "t.myTagType = :tagType AND "
+ "( :scheme IS NULL OR :scheme = '' OR t.mySystem = :scheme ) AND "
+ "t.myCode = :term AND "
+ "( :version IS NULL OR :version = '' OR t.myVersion = :version ) AND "
+ "( :userSelected IS NULL OR t.myUserSelected = :userSelected )")
List<TagDefinition> findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
@Param("tagType") TagTypeEnum tagType,
@Param("scheme") String scheme,
@Param("term") String term,
@Param("version") String version,
@Param("userSelected") Boolean userSelected,
Pageable pageable);
}
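
An illustrative call mirroring how CacheTagDefinitionDao uses this query; per the JPQL above, a null or empty scheme/version and a null userSelected act as wildcards. The caller class is a hypothetical name:

package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import org.springframework.data.domain.Pageable;
import java.util.List;

// Hypothetical caller; fetches at most one matching tag definition.
public class TagDefinitionQueryExample {

    private final ITagDefinitionDao myTagDefinitionDao;

    public TagDefinitionQueryExample(ITagDefinitionDao theTagDefinitionDao) {
        myTagDefinitionDao = theTagDefinitionDao;
    }

    public TagDefinition findFirstOrNull(String theSystem, String theCode) {
        List<TagDefinition> hits = myTagDefinitionDao.findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
                TagTypeEnum.TAG, theSystem, theCode, null, null, Pageable.ofSize(1));
        return hits.isEmpty() ? null : hits.get(0);
    }
}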

View File

@ -92,7 +92,8 @@ public class ExtendedHSearchSearchBuilder {
String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
boolean canUseHibernate = false;
-ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(theResourceType);
+ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(
+theResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String paramName : myParams.keySet()) {
// is this parameter supported?
if (illegalForHibernateSearch(paramName, resourceActiveSearchParams)) {
@ -218,7 +219,8 @@ public class ExtendedHSearchSearchBuilder {
// copy the keys to avoid concurrent modification error
ArrayList<String> paramNames = compileParamNames(searchParameterMap);
-ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(resourceType);
+ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(
+resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String nextParam : paramNames) {
if (illegalForHibernateSearch(nextParam, activeSearchParams)) {
// ignore magic params handled in JPA

View File

@ -151,7 +151,8 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper {
*/
@VisibleForTesting
Optional<RestSearchParameterTypeEnum> getParamType(String theResourceTypeName, String theParamName) {
-ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theResourceTypeName);
+ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
+theResourceTypeName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
RuntimeSearchParam searchParam = activeSearchParams.get(theParamName);
if (searchParam == null) {
return Optional.empty();

View File

@ -181,7 +181,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
for (String nextResourceType : theResourceTypes) {
StructureDefinition sd = fetchStructureDefinition(nextResourceType);
List<SearchParameter> parameters = toR5SearchParams(mySearchParamRegistry
-.getActiveSearchParams(nextResourceType)
+.getActiveSearchParams(
+nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateResource(writer, sd, parameters, theOperations);
}
@ -198,7 +199,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
}
if (theOperations.contains(GraphQLSchemaGenerator.FHIROperationType.SEARCH)) {
List<SearchParameter> parameters = toR5SearchParams(mySearchParamRegistry
-.getActiveSearchParams(nextResourceType)
+.getActiveSearchParams(
+nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateListAccessQuery(writer, parameters, nextResourceType);
myGenerator.generateConnectionAccessQuery(writer, parameters, nextResourceType);
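
Across all of these call sites the change is the same: the registry lookup now names the context in which the parameters will be used. A condensed sketch of the new call shape, with the example class hypothetical and the package/class names inferred from the imports in this diff:

package ca.uhn.fhir.example;

import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;

// Sketch only: resolves a single active search parameter for SEARCH usage.
public class SearchParamLookupExample {

    private final ISearchParamRegistry mySearchParamRegistry;

    public SearchParamLookupExample(ISearchParamRegistry theSearchParamRegistry) {
        mySearchParamRegistry = theSearchParamRegistry;
    }

    public RuntimeSearchParam findActiveParam(String theResourceType, String theParamName) {
        ResourceSearchParams active = mySearchParamRegistry.getActiveSearchParams(
                theResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
        return active.get(theParamName); // null if not active for this context
    }
}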

Some files were not shown because too many files have changed in this diff.