diff --git a/hapi-deployable-pom/pom.xml b/hapi-deployable-pom/pom.xml
index a63d30df6f8..0218ad0e976 100644
--- a/hapi-deployable-pom/pom.xml
+++ b/hapi-deployable-pom/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-android/pom.xml b/hapi-fhir-android/pom.xml
index bbb4e7d9acb..cad2fab3b55 100644
--- a/hapi-fhir-android/pom.xml
+++ b/hapi-fhir-android/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-base/pom.xml b/hapi-fhir-base/pom.xml
index b845c698f1b..10e5dd64e26 100644
--- a/hapi-fhir-base/pom.xml
+++ b/hapi-fhir-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java
index 41ec12464dd..8982f23ade3 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/BaseRuntimeElementDefinition.java
@@ -33,6 +33,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
public abstract class BaseRuntimeElementDefinition {
@@ -40,7 +41,7 @@ public abstract class BaseRuntimeElementDefinition {
private final Class extends T> myImplementingClass;
private final String myName;
private final boolean myStandardType;
- private Map, Constructor> myConstructors = Collections.synchronizedMap(new HashMap<>());
+ private final Map, Constructor> myConstructors = new ConcurrentHashMap<>();
private List myExtensions = new ArrayList<>();
private List myExtensionsModifier = new ArrayList<>();
private List myExtensionsNonModifier = new ArrayList<>();
@@ -84,27 +85,24 @@ public abstract class BaseRuntimeElementDefinition {
argumentType = theArgument.getClass();
}
- Constructor retVal = myConstructors.get(argumentType);
- if (retVal == null) {
+ Constructor retVal = myConstructors.computeIfAbsent(argumentType, type -> {
for (Constructor> next : getImplementingClass().getConstructors()) {
- if (argumentType == VOID_CLASS) {
+ if (type == VOID_CLASS) {
if (next.getParameterTypes().length == 0) {
- retVal = (Constructor) next;
- break;
- }
- } else if (next.getParameterTypes().length == 1) {
- if (next.getParameterTypes()[0].isAssignableFrom(argumentType)) {
- retVal = (Constructor) next;
- break;
+ return (Constructor) next;
}
+ } else if (next.getParameterTypes().length == 1 && next.getParameterTypes()[0].isAssignableFrom(type)) {
+ return (Constructor) next;
}
}
- if (retVal == null) {
- throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass()
- + " has no constructor with a single argument of type " + argumentType);
- }
- myConstructors.put(argumentType, retVal);
+ return null;
+ });
+
+ if (retVal == null) {
+ throw new ConfigurationException(Msg.code(1695) + "Class " + getImplementingClass()
+ + " has no constructor with a single argument of type " + argumentType);
}
+
return retVal;
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
index 38afad702c0..51b28adb95d 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/RuntimeSearchParam.java
@@ -60,6 +60,7 @@ public class RuntimeSearchParam {
private final List myComponents;
private final IIdType myIdUnqualifiedVersionless;
private IPhoneticEncoder myPhoneticEncoder;
+ private boolean myEnabledForSearching = true;
/**
* Constructor
@@ -166,6 +167,24 @@ public class RuntimeSearchParam {
}
}
+ /**
+ * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+ * generating indexes, which might be desired while the search parameter is still being indexed). This
+ * setting defaults to {@literal true} if it isn't set otherwise.
+ */
+ public boolean isEnabledForSearching() {
+ return myEnabledForSearching;
+ }
+
+ /**
+ * Is this search parameter actually enabled for being used in searches (as opposed to only being used for
+ * generating indexes, which might be desired while the search parameter is still being indexed). This
+ * setting defaults to {@literal true} if it isn't set otherwise.
+ */
+ public void setEnabledForSearching(boolean theEnabledForSearching) {
+ myEnabledForSearching = theEnabledForSearching;
+ }
+
public List getComponents() {
return myComponents;
}
@@ -361,13 +380,6 @@ public class RuntimeSearchParam {
return !myUpliftRefchains.isEmpty();
}
- public enum RuntimeSearchParamStatusEnum {
- ACTIVE,
- DRAFT,
- RETIRED,
- UNKNOWN
- }
-
/**
* This method tests whether a given FHIRPath expression could
* possibly apply to the given resource type.
@@ -413,6 +425,13 @@ public class RuntimeSearchParam {
return false;
}
+ public enum RuntimeSearchParamStatusEnum {
+ ACTIVE,
+ DRAFT,
+ RETIRED,
+ UNKNOWN
+ }
+
public static class Component {
private final String myExpression;
private final String myReference;
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java
index d0990842fa2..71d561c1db3 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/IValidationSupport.java
@@ -727,7 +727,7 @@ public interface IValidationSupport {
return this;
}
- String getCodeSystemName() {
+ public String getCodeSystemName() {
return myCodeSystemName;
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
index d2c87d99ab5..a3baa55eb45 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/api/Pointcut.java
@@ -2174,6 +2174,32 @@ public enum Pointcut implements IPointcut {
"ca.uhn.fhir.rest.api.server.RequestDetails",
"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails"),
+ /**
+ * Storage Hook:
+ * Invoked when a partition has been deleted, typically meaning the $partition-management-delete-partition
+ * operation has been invoked.
+ *
+ * This hook will only be called if
+ * partitioning is enabled in the JPA server.
+ *
+ *
+ * Hooks may accept the following parameters:
+ *
+ *
+ *
+ * ca.uhn.fhir.interceptor.model.RequestPartitionId - The ID of the partition that was deleted.
+ *
+ *
+ *
+ * Hooks must return void.
+ *
+ */
+ STORAGE_PARTITION_DELETED(
+ // Return type
+ void.class,
+ // Params
+ "ca.uhn.fhir.interceptor.model.RequestPartitionId"),
+
/**
* Storage Hook:
* Invoked before any partition aware FHIR operation, when the selected partition has been identified (ie. after the
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java
index 0c962c63340..db4dc9cb027 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/api/Constants.java
@@ -115,6 +115,7 @@ public class Constants {
public static final String HEADER_AUTHORIZATION_VALPREFIX_BASIC = "Basic ";
public static final String HEADER_AUTHORIZATION_VALPREFIX_BEARER = "Bearer ";
public static final String HEADER_CACHE_CONTROL = "Cache-Control";
+ public static final String HEADER_CLIENT_TIMEZONE = "Timezone";
public static final String HEADER_CONTENT_DISPOSITION = "Content-Disposition";
public static final String HEADER_CONTENT_ENCODING = "Content-Encoding";
public static final String HEADER_CONTENT_LOCATION = "Content-Location";
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
index f2c9ce61196..a2fbe76e1ad 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/param/DateRangeParam.java
@@ -112,6 +112,8 @@ public class DateRangeParam implements IQueryParameterAnd {
theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString())
.getRight());
}
+ // there is only one value; we will set it as the lower bound
+ // as a >= operation
validateAndSet(theDateParam, null);
break;
case ENDS_BEFORE:
@@ -121,6 +123,9 @@ public class DateRangeParam implements IQueryParameterAnd {
theDateParam.setValueAsString(DateUtils.getCompletedDate(theDateParam.getValueAsString())
.getLeft());
}
+
+ // there is only one value; we will set it as the upper bound
+ // as a <= operation
validateAndSet(null, theDateParam);
break;
default:
@@ -318,8 +323,8 @@ public class DateRangeParam implements IQueryParameterAnd {
case NOT_EQUAL:
break;
case LESSTHAN:
- case APPROXIMATE:
case LESSTHAN_OR_EQUALS:
+ case APPROXIMATE:
case ENDS_BEFORE:
throw new IllegalStateException(
Msg.code(1926) + "Invalid lower bound comparator: " + myLowerBound.getPrefix());
@@ -383,9 +388,9 @@ public class DateRangeParam implements IQueryParameterAnd {
case NOT_EQUAL:
case GREATERTHAN_OR_EQUALS:
break;
+ case LESSTHAN_OR_EQUALS:
case LESSTHAN:
case APPROXIMATE:
- case LESSTHAN_OR_EQUALS:
case ENDS_BEFORE:
throw new IllegalStateException(
Msg.code(1928) + "Invalid lower bound comparator: " + theLowerBound.getPrefix());
@@ -470,10 +475,13 @@ public class DateRangeParam implements IQueryParameterAnd {
if (myLowerBound != null && myLowerBound.getMissing() != null) {
retVal.add((myLowerBound));
} else {
- if (myLowerBound != null && !myLowerBound.isEmpty()) {
+ boolean hasLowerBound = myLowerBound != null && !myLowerBound.isEmpty();
+ boolean hasUpperBound = myUpperBound != null && !myUpperBound.isEmpty();
+
+ if (hasLowerBound) {
retVal.add((myLowerBound));
}
- if (myUpperBound != null && !myUpperBound.isEmpty()) {
+ if (hasUpperBound) {
retVal.add((myUpperBound));
}
}
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java
index e88047132de..2c7bd3c1072 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/DateUtils.java
@@ -20,6 +20,7 @@
package ca.uhn.fhir.util;
import ca.uhn.fhir.i18n.Msg;
+import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
@@ -28,11 +29,20 @@ import java.lang.ref.SoftReference;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.YearMonth;
+import java.time.format.DateTimeFormatter;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+import java.time.temporal.TemporalField;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
import java.util.TimeZone;
/**
@@ -93,6 +103,89 @@ public final class DateUtils {
*/
private DateUtils() {}
+ /**
+ * Calculate a LocalDateTime with any missing date/time data points defaulting to the earliest values (ex 0 for hour)
+ * from a TemporalAccessor or empty if it doesn't contain a year.
+ *
+ * @param theTemporalAccessor The TemporalAccessor containing date/time information
+ * @return A LocalDateTime or empty
+ */
+ public static Optional extractLocalDateTimeForRangeStartOrEmpty(
+ TemporalAccessor theTemporalAccessor) {
+ if (theTemporalAccessor.isSupported(ChronoField.YEAR)) {
+ final int year = theTemporalAccessor.get(ChronoField.YEAR);
+ final Month month = Month.of(getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MONTH_OF_YEAR, 1));
+ final int day = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.DAY_OF_MONTH, 1);
+ final int hour = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.HOUR_OF_DAY, 0);
+ final int minute = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MINUTE_OF_HOUR, 0);
+ final int seconds = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.SECOND_OF_MINUTE, 0);
+
+ return Optional.of(LocalDateTime.of(year, month, day, hour, minute, seconds));
+ }
+
+ return Optional.empty();
+ }
+
+ /**
+ * Calculate a LocalDateTime with any missing date/time data points defaulting to the latest values (ex 23 for hour)
+ * from a TemporalAccessor or empty if it doesn't contain a year.
+ *
+ * @param theTemporalAccessor The TemporalAccessor containing date/time information
+ * @return A LocalDateTime or empty
+ */
+ public static Optional extractLocalDateTimeForRangeEndOrEmpty(TemporalAccessor theTemporalAccessor) {
+ if (theTemporalAccessor.isSupported(ChronoField.YEAR)) {
+ final int year = theTemporalAccessor.get(ChronoField.YEAR);
+ final Month month = Month.of(getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MONTH_OF_YEAR, 12));
+ final int day = getTimeUnitIfSupported(
+ theTemporalAccessor,
+ ChronoField.DAY_OF_MONTH,
+ YearMonth.of(year, month).atEndOfMonth().getDayOfMonth());
+ final int hour = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.HOUR_OF_DAY, 23);
+ final int minute = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.MINUTE_OF_HOUR, 59);
+ final int seconds = getTimeUnitIfSupported(theTemporalAccessor, ChronoField.SECOND_OF_MINUTE, 59);
+
+ return Optional.of(LocalDateTime.of(year, month, day, hour, minute, seconds));
+ }
+
+ return Optional.empty();
+ }
+
+ /**
+ * With the provided DateTimeFormatter, parse a date time String or return empty if the String doesn't correspond
+ * to the formatter.
+ *
+ * @param theDateTimeString A date/time String in some date format
+ * @param theSupportedDateTimeFormatter The DateTimeFormatter we expect corresponds to the String
+ * @return The parsed TemporalAccessor or empty
+ */
+ public static Optional parseDateTimeStringIfValid(
+ String theDateTimeString, DateTimeFormatter theSupportedDateTimeFormatter) {
+ Objects.requireNonNull(theSupportedDateTimeFormatter);
+ Preconditions.checkArgument(StringUtils.isNotBlank(theDateTimeString));
+
+ try {
+ return Optional.of(theSupportedDateTimeFormatter.parse(theDateTimeString));
+ } catch (Exception exception) {
+ return Optional.empty();
+ }
+ }
+
+ private static int getTimeUnitIfSupported(
+ TemporalAccessor theTemporalAccessor, TemporalField theTemporalField, int theDefaultValue) {
+ return getTimeUnitIfSupportedOrEmpty(theTemporalAccessor, theTemporalField)
+ .orElse(theDefaultValue);
+ }
+
+ private static Optional getTimeUnitIfSupportedOrEmpty(
+ TemporalAccessor theTemporalAccessor, TemporalField theTemporalField) {
+ if (theTemporalAccessor.isSupported(theTemporalField)) {
+ return Optional.of(theTemporalAccessor.get(theTemporalField));
+ }
+
+ return Optional.empty();
+ }
+
/**
* A factory for {@link SimpleDateFormat}s. The instances are stored in a
* threadlocal way because SimpleDateFormat is not thread safe as noted in
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/HapiExtensions.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/HapiExtensions.java
index 4ad9769b85c..93e5086cd95 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/HapiExtensions.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/HapiExtensions.java
@@ -110,6 +110,12 @@ public class HapiExtensions {
public static final String EXT_SP_UNIQUE = "http://hapifhir.io/fhir/StructureDefinition/sp-unique";
+ /**
+ * URL for extension on a Search Parameter which determines whether it should be enabled for searching for resources
+ */
+ public static final String EXT_SEARCHPARAM_ENABLED_FOR_SEARCHING =
+ "http://hapifhir.io/fhir/StructureDefinition/searchparameter-enabled-for-searching";
+
/**
* URL for extension on a Phonetic String SearchParameter indicating that text values should be phonetically indexed with the named encoder
*/
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java
index 9a56f93a1e4..e16fbe1e936 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/JsonUtil.java
@@ -34,6 +34,7 @@ import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.PropertyWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import jakarta.annotation.Nonnull;
import java.io.IOException;
@@ -60,16 +61,22 @@ public class JsonUtil {
ourMapperPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
ourMapperPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
ourMapperPrettyPrint.enable(SerializationFeature.INDENT_OUTPUT);
+ // Needed to handle ZonedDateTime
+ ourMapperPrettyPrint.registerModule(new JavaTimeModule());
ourMapperNonPrettyPrint = new ObjectMapper();
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
ourMapperNonPrettyPrint.setFilterProvider(SENSITIVE_DATA_FILTER_PROVIDER);
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
+ // Needed to handle ZonedDateTime
+ ourMapperNonPrettyPrint.registerModule(new JavaTimeModule());
ourMapperIncludeSensitive = new ObjectMapper();
ourMapperIncludeSensitive.setFilterProvider(SHOW_ALL_DATA_FILTER_PROVIDER);
ourMapperIncludeSensitive.setSerializationInclusion(JsonInclude.Include.NON_NULL);
ourMapperIncludeSensitive.disable(SerializationFeature.INDENT_OUTPUT);
+ // Needed to handle ZonedDateTime
+ ourMapperIncludeSensitive.registerModule(new JavaTimeModule());
}
/**
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
index b1c8c25ed76..6f1ced0a232 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/ParametersUtil.java
@@ -43,6 +43,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
+import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -58,20 +59,20 @@ public class ParametersUtil {
public static Optional getNamedParameterValueAsString(
FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
Function, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
- return extractNamedParameters(theCtx, theParameters, theParameterName, mapper).stream()
+ return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper).stream()
.findFirst();
}
public static List getNamedParameterValuesAsString(
FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
Function, String> mapper = t -> defaultIfBlank(t.getValueAsString(), null);
- return extractNamedParameters(theCtx, theParameters, theParameterName, mapper);
+ return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper);
}
public static List getNamedParameterValuesAsInteger(
FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
Function, Integer> mapper = t -> (Integer) t.getValue();
- return extractNamedParameters(theCtx, theParameters, theParameterName, mapper);
+ return extractNamedParameterValues(theCtx, theParameters, theParameterName, mapper);
}
public static Optional getNamedParameterValueAsInteger(
@@ -80,6 +81,19 @@ public class ParametersUtil {
.findFirst();
}
+ /**
+ * Returns the resource within a parameter.
+ * @param theCtx thr FHIR context
+ * @param theParameters the parameters instance where to look for the resource
+ * @param theParameterName the parameter name
+ * @return the resource
+ */
+ public static Optional getNamedParameterResource(
+ FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
+ return extractNamedParameterResources(theCtx, theParameters, theParameterName).stream()
+ .findFirst();
+ }
+
public static Optional getNamedParameter(
FhirContext theCtx, IBaseResource theParameters, String theParameterName) {
return getNamedParameters(theCtx, theParameters, theParameterName).stream()
@@ -153,7 +167,7 @@ public class ParametersUtil {
.map(t -> (Integer) t);
}
- private static List extractNamedParameters(
+ private static List extractNamedParameterValues(
FhirContext theCtx,
IBaseParameters theParameters,
String theParameterName,
@@ -170,7 +184,25 @@ public class ParametersUtil {
.filter(t -> t instanceof IPrimitiveType>)
.map(t -> ((IPrimitiveType>) t))
.map(theMapper)
- .filter(t -> t != null)
+ .filter(Objects::nonNull)
+ .forEach(retVal::add);
+ }
+ return retVal;
+ }
+
+ private static List extractNamedParameterResources(
+ FhirContext theCtx, IBaseParameters theParameters, String theParameterName) {
+ List retVal = new ArrayList<>();
+
+ List namedParameters = getNamedParameters(theCtx, theParameters, theParameterName);
+ for (IBase nextParameter : namedParameters) {
+ BaseRuntimeElementCompositeDefinition> nextParameterDef =
+ (BaseRuntimeElementCompositeDefinition>) theCtx.getElementDefinition(nextParameter.getClass());
+ BaseRuntimeChildDefinition resourceChild = nextParameterDef.getChildByName("resource");
+ List resourceValues = resourceChild.getAccessor().getValues(nextParameter);
+ resourceValues.stream()
+ .filter(IBaseResource.class::isInstance)
+ .map(t -> ((IBaseResource) t))
.forEach(retVal::add);
}
return retVal;
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java
index c363f97b6d8..49708447990 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/TerserUtil.java
@@ -26,10 +26,12 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeChildChoiceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.tuple.Triple;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
+import org.hl7.fhir.instance.model.api.IBaseHasExtensions;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
@@ -93,6 +95,8 @@ public final class TerserUtil {
private static final Logger ourLog = getLogger(TerserUtil.class);
private static final String EQUALS_DEEP = "equalsDeep";
+ public static final String DATA_ABSENT_REASON_EXTENSION_URI =
+ "http://hl7.org/fhir/StructureDefinition/data-absent-reason";
private TerserUtil() {}
@@ -266,6 +270,15 @@ public final class TerserUtil {
return theItems.stream().anyMatch(i -> equals(i, theItem, method));
}
+ private static boolean hasDataAbsentReason(IBase theItem) {
+ if (theItem instanceof IBaseHasExtensions) {
+ IBaseHasExtensions hasExtensions = (IBaseHasExtensions) theItem;
+ return hasExtensions.getExtension().stream()
+ .anyMatch(t -> StringUtils.equals(t.getUrl(), DATA_ABSENT_REASON_EXTENSION_URI));
+ }
+ return false;
+ }
+
/**
* Merges all fields on the provided instance. theTo will contain a union of all values from theFrom
* instance and theTo instance.
@@ -695,24 +708,36 @@ public final class TerserUtil {
BaseRuntimeChildDefinition childDefinition,
List theFromFieldValues,
List theToFieldValues) {
- for (IBase theFromFieldValue : theFromFieldValues) {
- if (contains(theFromFieldValue, theToFieldValues)) {
+ if (!theFromFieldValues.isEmpty() && theToFieldValues.stream().anyMatch(TerserUtil::hasDataAbsentReason)) {
+ // If the to resource has a data absent reason, and there is potentially real data incoming
+ // in the from resource, we should clear the data absent reason because it won't be absent anymore.
+ theToFieldValues = removeDataAbsentReason(theTo, childDefinition, theToFieldValues);
+ }
+
+ for (IBase fromFieldValue : theFromFieldValues) {
+ if (contains(fromFieldValue, theToFieldValues)) {
continue;
}
- IBase newFieldValue = newElement(theTerser, childDefinition, theFromFieldValue, null);
- if (theFromFieldValue instanceof IPrimitiveType) {
+ if (hasDataAbsentReason(fromFieldValue) && !theToFieldValues.isEmpty()) {
+ // if the from field value asserts a reason the field isn't populated, but the to field is populated,
+ // we don't want to overwrite real data with the extension
+ continue;
+ }
+
+ IBase newFieldValue = newElement(theTerser, childDefinition, fromFieldValue, null);
+ if (fromFieldValue instanceof IPrimitiveType) {
try {
- Method copyMethod = getMethod(theFromFieldValue, "copy");
+ Method copyMethod = getMethod(fromFieldValue, "copy");
if (copyMethod != null) {
- newFieldValue = (IBase) copyMethod.invoke(theFromFieldValue, new Object[] {});
+ newFieldValue = (IBase) copyMethod.invoke(fromFieldValue, new Object[] {});
}
} catch (Throwable t) {
- ((IPrimitiveType) newFieldValue)
- .setValueAsString(((IPrimitiveType) theFromFieldValue).getValueAsString());
+ ((IPrimitiveType>) newFieldValue)
+ .setValueAsString(((IPrimitiveType>) fromFieldValue).getValueAsString());
}
} else {
- theTerser.cloneInto(theFromFieldValue, newFieldValue, true);
+ theTerser.cloneInto(fromFieldValue, newFieldValue, true);
}
try {
@@ -724,6 +749,21 @@ public final class TerserUtil {
}
}
+ private static List removeDataAbsentReason(
+ IBaseResource theResource, BaseRuntimeChildDefinition theFieldDefinition, List theFieldValues) {
+ for (int i = 0; i < theFieldValues.size(); i++) {
+ if (hasDataAbsentReason(theFieldValues.get(i))) {
+ try {
+ theFieldDefinition.getMutator().remove(theResource, i);
+ } catch (UnsupportedOperationException e) {
+ // the field must be single-valued, just clear it
+ theFieldDefinition.getMutator().setValue(theResource, null);
+ }
+ }
+ }
+ return theFieldDefinition.getAccessor().getValues(theResource);
+ }
+
/**
* Clones the specified resource.
*
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
index bf46c7fe3bf..8d4867594cf 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
+++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/util/VersionEnum.java
@@ -162,9 +162,16 @@ public enum VersionEnum {
V7_3_0,
V7_4_0,
+ V7_4_1,
+ V7_4_2,
+ V7_4_3,
+ V7_4_4,
+ V7_4_5,
V7_5_0,
- V7_6_0;
+ V7_6_0,
+ V7_7_0,
+ V7_8_0;
public static VersionEnum latestVersion() {
VersionEnum[] values = VersionEnum.values();
diff --git a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
index 662ce8daf21..f338fc8fa1c 100644
--- a/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
+++ b/hapi-fhir-base/src/main/resources/ca/uhn/fhir/i18n/hapi-messages.properties
@@ -133,6 +133,7 @@ ca.uhn.fhir.jpa.dao.BaseStorageDao.successfulTimingSuffix=Took {0}ms.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceNotExisting=Not deleted, resource {0} does not exist.
ca.uhn.fhir.jpa.dao.BaseStorageDao.deleteResourceAlreadyDeleted=Not deleted, resource {0} was already deleted.
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameter=Unknown search parameter "{0}" for resource type "{1}". Valid search parameters for this search are: {2}
+ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSearchParameterNotEnabledForSearch=Search parameter "{0}" for resource type "{1}" is not active for searching. Valid search parameters for this search are: {2}
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameter=Unknown _sort parameter value "{0}" for resource type "{1}" (Note: sort parameters values must use a valid Search Parameter). Valid values for this search are: {2}
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidSortParameterTooManyChains=Invalid _sort expression, can not chain more than once in a sort expression: {0}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/DateUtilsTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/DateUtilsTest.java
new file mode 100644
index 00000000000..521691e59eb
--- /dev/null
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/DateUtilsTest.java
@@ -0,0 +1,179 @@
+package ca.uhn.fhir.util;
+
+import jakarta.annotation.Nullable;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.time.LocalDateTime;
+import java.time.Month;
+import java.time.format.DateTimeFormatter;
+import java.time.temporal.TemporalAccessor;
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+class DateUtilsTest {
+
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY = DateTimeFormatter.ofPattern("yyyy");
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM = DateTimeFormatter.ofPattern("yyyy-MM");
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD = DateTimeFormatter.ISO_DATE;
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH");
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm");
+ private static final DateTimeFormatter DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM_SS = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
+
+ private static Stream extractLocalDateTimeStartIfValidParams() {
+ return Stream.of(
+ Arguments.of(
+ getTemporalAccessor("2024"),
+ LocalDateTime.of(2024, Month.JANUARY, 1, 0, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-02"),
+ LocalDateTime.of(2023, Month.FEBRUARY, 1, 0, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2022-09"),
+ LocalDateTime.of(2022, Month.SEPTEMBER, 1, 0, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2021-03-24"),
+ LocalDateTime.of(2021, Month.MARCH, 24, 0, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-10-23"),
+ LocalDateTime.of(2024, Month.OCTOBER, 23, 0, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-08-24T12"),
+ LocalDateTime.of(2024, Month.AUGUST, 24, 12, 0, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-11-24T12:35"),
+ LocalDateTime.of(2024, Month.NOVEMBER, 24, 12, 35, 0)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-09-24T12:35:47"),
+ LocalDateTime.of(2024, Month.SEPTEMBER, 24, 12, 35, 47)
+ )
+ );
+ }
+
+ private static Stream extractLocalDateTimeEndIfValidParams() {
+ return Stream.of(
+ Arguments.of(
+ getTemporalAccessor("2024"),
+ LocalDateTime.of(2024, Month.DECEMBER, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-01"),
+ LocalDateTime.of(2023, Month.JANUARY, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-02"),
+ LocalDateTime.of(2023, Month.FEBRUARY, 28, 23, 59, 59)
+ ),
+ // Leap year
+ Arguments.of(
+ getTemporalAccessor("2024-02"),
+ LocalDateTime.of(2024, Month.FEBRUARY, 29, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-03"),
+ LocalDateTime.of(2023, Month.MARCH, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-04"),
+ LocalDateTime.of(2023, Month.APRIL, 30, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-05"),
+ LocalDateTime.of(2023, Month.MAY, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-06"),
+ LocalDateTime.of(2023, Month.JUNE, 30, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-07"),
+ LocalDateTime.of(2023, Month.JULY, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-08"),
+ LocalDateTime.of(2023, Month.AUGUST, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2023-09"),
+ LocalDateTime.of(2023, Month.SEPTEMBER, 30, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2022-10"),
+ LocalDateTime.of(2022, Month.OCTOBER, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2022-11"),
+ LocalDateTime.of(2022, Month.NOVEMBER, 30, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2022-12"),
+ LocalDateTime.of(2022, Month.DECEMBER, 31, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2021-03-24"),
+ LocalDateTime.of(2021, Month.MARCH, 24, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-10-23"),
+ LocalDateTime.of(2024, Month.OCTOBER, 23, 23, 59, 59)
+ ),
+ Arguments.of(
+ getTemporalAccessor("2024-09-24T12:35:47"),
+ LocalDateTime.of(2024, Month.SEPTEMBER, 24, 12, 35, 47)
+ )
+ );
+ }
+
+ @ParameterizedTest
+ @MethodSource("extractLocalDateTimeStartIfValidParams")
+ void extractLocalDateTimeStartIfValid (
+ TemporalAccessor theTemporalAccessor,
+ @Nullable LocalDateTime theExpectedResult) {
+ assertThat(DateUtils.extractLocalDateTimeForRangeStartOrEmpty(theTemporalAccessor))
+ .isEqualTo(Optional.ofNullable(theExpectedResult));
+ }
+
+ @ParameterizedTest
+ @MethodSource("extractLocalDateTimeEndIfValidParams")
+ void extractLocalDateTimeEndIfValid (
+ TemporalAccessor theTemporalAccessor,
+ @Nullable LocalDateTime theExpectedResult) {
+ assertThat(DateUtils.extractLocalDateTimeForRangeEndOrEmpty(theTemporalAccessor))
+ .isEqualTo(Optional.ofNullable(theExpectedResult));
+ }
+
+ private static TemporalAccessor getTemporalAccessor(String theDateTimeString) {
+ final DateTimeFormatter dateTimeFormatter = getDateTimeFormatter(theDateTimeString);
+
+ assertThat(dateTimeFormatter)
+ .withFailMessage("Cannot find DateTimeFormatter for: " + theDateTimeString)
+ .isNotNull();
+
+ return DateUtils.parseDateTimeStringIfValid(
+ theDateTimeString,
+ dateTimeFormatter
+ ).orElseThrow(() -> new IllegalArgumentException("Unable to parse: " + theDateTimeString));
+ }
+
+ private static DateTimeFormatter getDateTimeFormatter(String theDateTimeString) {
+ return switch (theDateTimeString.length()) {
+ case 4 -> DATE_TIME_FORMATTER_YYYY;
+ case 7 -> DATE_TIME_FORMATTER_YYYY_MM;
+ case 10 -> DATE_TIME_FORMATTER_YYYY_MM_DD;
+ case 13 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH;
+ case 16 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM;
+ case 19 -> DATE_TIME_FORMATTER_YYYY_MM_DD_HH_MM_SS;
+ default -> null;
+ };
+ }
+}
diff --git a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java
index aff2b3b9b89..38df69f017f 100644
--- a/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java
+++ b/hapi-fhir-base/src/test/java/ca/uhn/fhir/util/TaskChunkerTest.java
@@ -77,5 +77,4 @@ public class TaskChunkerTest {
Arguments.of(List.of(1,2,3,4,5,6,7,8,9), List.of(List.of(1,2,3), List.of(4,5,6), List.of(7,8,9)))
);
}
-
}
diff --git a/hapi-fhir-bom/pom.xml b/hapi-fhir-bom/pom.xml
index 22b8724f7e8..a463622aa98 100644
--- a/hapi-fhir-bom/pom.xml
+++ b/hapi-fhir-bom/pom.xml
@@ -4,7 +4,7 @@
4.0.0ca.uhn.hapi.fhirhapi-fhir-bom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOTpomHAPI FHIR BOM
@@ -12,7 +12,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-checkstyle/pom.xml b/hapi-fhir-checkstyle/pom.xml
index e3a44edd071..354cd9f55a3 100644
--- a/hapi-fhir-checkstyle/pom.xml
+++ b/hapi-fhir-checkstyle/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml b/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml
index c74e8b7e01c..11c9b669ad4 100644
--- a/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml
+++ b/hapi-fhir-checkstyle/src/checkstyle/hapi-base-checkstyle.xml
@@ -64,4 +64,12 @@
+
+
+
+
+
+
+
+
diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
index 64ba6637ea2..df9d39a6ad7 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
index c6e0de92d36..92369da4db9 100644
--- a/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
+++ b/hapi-fhir-cli/hapi-fhir-cli-app/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhirhapi-fhir-cli
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-cli/pom.xml b/hapi-fhir-cli/pom.xml
index 25ee977592a..48d765c5c1e 100644
--- a/hapi-fhir-cli/pom.xml
+++ b/hapi-fhir-cli/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-client-okhttp/pom.xml b/hapi-fhir-client-okhttp/pom.xml
index 5e6bb12d288..bb0ed13dbc2 100644
--- a/hapi-fhir-client-okhttp/pom.xml
+++ b/hapi-fhir-client-okhttp/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java b/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java
index b8ceb4efbf3..0060cf2c4aa 100644
--- a/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java
+++ b/hapi-fhir-client-okhttp/src/test/java/ca/uhn/fhir/okhttp/GenericOkHttpClientDstu2Test.java
@@ -1357,7 +1357,9 @@ public class GenericOkHttpClientDstu2Test {
.returnBundle(Bundle.class)
.execute();
- assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri);
+ assertThat(MY_SERVLET.ourRequestUri).isIn(
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");
// assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() +
@@ -1391,7 +1393,10 @@ public class GenericOkHttpClientDstu2Test {
.execute();
assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
- assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+ assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
+ s -> assertThat(s).contains("_elements=identifier%2Cname"),
+ s -> assertThat(s).contains("_elements=name%2Cidentifier")
+ );
assertThat(MY_SERVLET.ourRequestUri).doesNotContain("_format=json");
// assertThat(MY_SERVLET.ourRequestUri,
diff --git a/hapi-fhir-client/pom.xml b/hapi-fhir-client/pom.xml
index 53a25a1d95d..5231b5d4d8e 100644
--- a/hapi-fhir-client/pom.xml
+++ b/hapi-fhir-client/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-converter/pom.xml b/hapi-fhir-converter/pom.xml
index 8ee8cee53fa..8298cfebe05 100644
--- a/hapi-fhir-converter/pom.xml
+++ b/hapi-fhir-converter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-dist/pom.xml b/hapi-fhir-dist/pom.xml
index ed0a4dd5507..bbacd405073 100644
--- a/hapi-fhir-dist/pom.xml
+++ b/hapi-fhir-dist/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-fhir
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../pom.xml
diff --git a/hapi-fhir-docs/pom.xml b/hapi-fhir-docs/pom.xml
index 095b30baec8..65bb05ae9ee 100644
--- a/hapi-fhir-docs/pom.xml
+++ b/hapi-fhir-docs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java
index 8854253c283..7d6d9bc4e8f 100644
--- a/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java
+++ b/hapi-fhir-docs/src/main/java/ca/uhn/hapi/fhir/docs/BundleFetcher.java
@@ -39,7 +39,7 @@ public class BundleFetcher {
// START SNIPPET: loadAll
// Create a context and a client
FhirContext ctx = FhirContext.forR4();
- String serverBase = "http://hapi.fhr.org/baseR4";
+ String serverBase = "http://hapi.fhir.org/baseR4";
IGenericClient client = ctx.newRestfulGenericClient(serverBase);
// We'll populate this list
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/version.yaml
index ddc0f7d4b84..d680c8f6389 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/version.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/version.yaml
@@ -1,3 +1,3 @@
---
-release-date: "2023-02-18"
+release-date: "2024-02-18"
codename: "Apollo"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/version.yaml
index 9e4b8d89de0..73d56cc6a9d 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/version.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_2_0/version.yaml
@@ -1,3 +1,3 @@
---
-release-date: "2023-05-18"
+release-date: "2024-05-18"
codename: "Borealis"
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/QueryParameterUtilsTest.java b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_2/upgrade.md
similarity index 100%
rename from hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/QueryParameterUtilsTest.java
rename to hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_2/upgrade.md
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_2/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_2/version.yaml
new file mode 100644
index 00000000000..d2902954939
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_2/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2024-09-20"
+codename: "Copernicus"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_3/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_3/upgrade.md
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_3/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_3/version.yaml
new file mode 100644
index 00000000000..c7e8946ba75
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_3/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2024-09-30"
+codename: "Copernicus"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/6363-updating-fhir-core-dependency.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/6363-updating-fhir-core-dependency.yaml
new file mode 100644
index 00000000000..00a643adbd5
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/6363-updating-fhir-core-dependency.yaml
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 6363
+title: "This release updates the org.hl7.fhir core dependency up to 6.3.23, in order to patch [CVE-2024-45294](https://nvd.nist.gov/vuln/detail/CVE-2024-45294)."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/upgrade.md
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/version.yaml
new file mode 100644
index 00000000000..f583ee20536
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_4/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2024-10-17"
+codename: "Copernicus"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_5/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_5/upgrade.md
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_5/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_5/version.yaml
new file mode 100644
index 00000000000..9c3c1e39eee
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_4_5/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2024-10-21"
+codename: "Copernicus"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6210-add-chunk-id-to-delete-expunge-log-msg.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6210-add-chunk-id-to-delete-expunge-log-msg.yaml
new file mode 100644
index 00000000000..e526840512a
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6210-add-chunk-id-to-delete-expunge-log-msg.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6210
+jira: SMILE-8428
+title: "Batch instance ID and chunk ID have been added to the logging context so that they can be automatically added to
+batch-related messages in the log."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6216-fulltext-searching-not-returning-expected-results.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6216-fulltext-searching-not-returning-expected-results.yaml
index 3a0a64a120c..10b67b55feb 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6216-fulltext-searching-not-returning-expected-results.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6216-fulltext-searching-not-returning-expected-results.yaml
@@ -1,6 +1,6 @@
---
type: fix
-backport: 7.2.3
+backport: 7.2.3,7.4.2
issue: 6216
jira: SMILE-8806
title: "Previously, searches combining the `_text` query parameter (using Lucene/Elasticsearch) with query parameters
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6252-improve-validator-performance.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6252-improve-validator-performance.yaml
new file mode 100644
index 00000000000..a27d92ec4b7
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6252-improve-validator-performance.yaml
@@ -0,0 +1,6 @@
+---
+type: perf
+issue: 6253
+title: "A cache has been added to the validation services layer which results
+ in improved validation performance. Thanks to Max Bureck for the
+ contribution!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml
new file mode 100644
index 00000000000..2e48e6e7bd9
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6285-batch2-reindex-version2-added.yaml
@@ -0,0 +1,17 @@
+---
+type: fix
+issue: 6285
+title: "Updated the Reindex Batch2 job to allow
+ for an additional step that will check to ensure
+ that no pending 'reindex' work is needed.
+ This was done to prevent a bug in which
+ value set expansion would not return all
+ the existing CodeSystem Concepts after
+ a reindex call, due to some of the concepts
+ being deferred to future job runs.
+
+ As such, `$reindex` operations on CodeSystems
+ will no longer result in incorrect value set
+ expansion when such an expansion is called
+ 'too soon' after a $reindex operation.
+"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6313-add-pointcut-for-delete-partition.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6313-add-pointcut-for-delete-partition.yaml
new file mode 100644
index 00000000000..53a5736a15d
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6313-add-pointcut-for-delete-partition.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6313
+jira: SMILE-8847
+title: "The `STORAGE_PARTITION_DELETED` pointcut has been added and will be called upon deleting a partition
+using the `$partition-management-delete-partition` operation."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6317-fix-resource-duplication-for-composite-unique-sp-with-date-time-component.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6317-fix-resource-duplication-for-composite-unique-sp-with-date-time-component.yaml
index f1e379d654c..a38bc6e1c7d 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6317-fix-resource-duplication-for-composite-unique-sp-with-date-time-component.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6317-fix-resource-duplication-for-composite-unique-sp-with-date-time-component.yaml
@@ -1,6 +1,7 @@
---
type: fix
issue: 6317
+backport: 7.4.3
title: "Previously, defining a unique combo Search Parameter with the DateTime component and submitting multiple
resources with the same dateTime element (e.g. Observation.effectiveDateTime) resulted in duplicate resource creation.
This has been fixed."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml
new file mode 100644
index 00000000000..51b32ae0b6b
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6323-concurrent-constructor-access.yaml
@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 6323
+title: "A synchronization choke point was removed from the model object initialization code, reducing the risk of
+multi-thread contention."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6325-partitioned-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6325-partitioned-search.yaml
new file mode 100644
index 00000000000..b77268b15a9
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6325-partitioned-search.yaml
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6325
+title: "A new configuration option, `PartitionSettings#setPartitionIdsInPrimaryKeys(boolean)` configures the query engine
+ to include the partitioning column in search query joins."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6339-drop-index-nondefault-schema.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6339-drop-index-nondefault-schema.yaml
new file mode 100644
index 00000000000..9f3dc6119c7
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6339-drop-index-nondefault-schema.yaml
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 6339
+jira: SMILE-9044
+title: "Fixed a bug in migrations when using Postgres when using the non-default schema. If a migration attempted to drop a primary key, the generated SQL would only ever target the `public` schema.
+This has been corrected, and the current schema is now used, with `public` as a fallback. Thanks to Adrienne Sox for the contribution!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6341-expand-translation-cache.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6341-expand-translation-cache.yaml
new file mode 100644
index 00000000000..e0e592c0d4a
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6341-expand-translation-cache.yaml
@@ -0,0 +1,6 @@
+---
+type: change
+issue: 6341
+title: "The CachingValidationSupport cache for concept translations will
+ now keep up to 500000 translations instead of the previous 5000.
+ This will be made configurable in a future release."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6345-date-searching-performance-tweaking.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6345-date-searching-performance-tweaking.yaml
new file mode 100644
index 00000000000..7a1e6273936
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6345-date-searching-performance-tweaking.yaml
@@ -0,0 +1,8 @@
+---
+type: perf
+issue: 6345
+title: "Date searches using equality would perform badly as the query planner
+ does not know that our LOW_VALUE columns are always < HIGH_VALUE
+ columns, and HIGH_VALUE is always > LOW_VALUE columns.
+ These queries have been fixed to account for this.
+"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6357-update-cr-version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6357-update-cr-version.yaml
new file mode 100644
index 00000000000..83d64d404e5
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6357-update-cr-version.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6357
+title: Upgrade the Clinical Reasoning module to the latest release of 3.13.0. This update comes with several changes
 and feature enhancements to CPG and dQM clinical-reasoning operations. Please review the associated ticket and upgrade.md
 for a detailed list of changes.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-remote-terminology-validation-does-not-error-for-invalid-codes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-remote-terminology-validation-does-not-error-for-invalid-codes.yaml
new file mode 100644
index 00000000000..eb53bd0489c
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-remote-terminology-validation-does-not-error-for-invalid-codes.yaml
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 6359
+backport: 7.4.4
+title: "After upgrading org.hl7.fhir.core from 6.1.2.2 to 6.3.11, the $validate-code operation stopped returning an
+error for invalid codes using remote terminology. This has been fixed."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-support-for-out-parameter-issues-remote-terminology-validation.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-support-for-out-parameter-issues-remote-terminology-validation.yaml
new file mode 100644
index 00000000000..26853bc4138
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6359-support-for-out-parameter-issues-remote-terminology-validation.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6359
+backport: 7.4.4
+title: "Remote Terminology validation has been enhanced to support output parameter `issues` for the $validate-code
+operation."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6365-fix-include-crash-on-mssql.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6365-fix-include-crash-on-mssql.yaml
new file mode 100644
index 00000000000..00bed7ee3fa
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6365-fix-include-crash-on-mssql.yaml
@@ -0,0 +1,5 @@
+---
+type: fix
+issue: 6365
+title: "A crash while executing a search with named `_include` parameters on
+ MSSQL has been fixed. Thanks to Craig McClendon for the pull request!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6366-consent-plumbing.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6366-consent-plumbing.yaml
new file mode 100644
index 00000000000..3a3de3d1fd1
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6366-consent-plumbing.yaml
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 6366
+title: "Add plumbing for combining IConsentServices with different vote tally strategies"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6370-data-absent-merge.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6370-data-absent-merge.yaml
new file mode 100644
index 00000000000..255a4f18eb2
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6370-data-absent-merge.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6370
+title: "When using the FHIR `TerserUtil` to merge two resources, if one resource has real data in a particular field,
+and the other resource has a `data-absent-reason` extension in the same field, the real data will be given
+precedence in the merged resource, and the extension will be ignored."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6372-lucene-and-jpa-search-combine-issues.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6372-lucene-and-jpa-search-combine-issues.yaml
new file mode 100644
index 00000000000..9946a6a56f4
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6372-lucene-and-jpa-search-combine-issues.yaml
@@ -0,0 +1,10 @@
+---
+type: fix
+issue: 6372
+jira: SMILE-9073
+backport: 7.4.5
+title: "Searches that combined full-text searching (i.e. `_text` or `_content`)
+ with other search parameters could fail to return all results if we encountered
+ 1600 matches against the full-text index where none of them match the rest of the query.
+ This has now been fixed.
+"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6375-add-hash-identity-token-mode.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6375-add-hash-identity-token-mode.yaml
new file mode 100644
index 00000000000..afcbe53ed49
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/6375-add-hash-identity-token-mode.yaml
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 6375
+title: "A new experimental JPA setting has been added to JpaStorageSettings which
+ causes searches for token SearchParameters to include a predicate on the
+ HASH_IDENTITY column even if it is not needed because other hashes are in use."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/changes.yaml
index 7d14d84956f..c900db52e43 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/changes.yaml
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/changes.yaml
@@ -4,7 +4,7 @@
title: "The version of a few dependencies have been bumped to more recent versions
(dependent HAPI modules listed in brackets):
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/upgrade.md
index e69de29bb2d..3ddb3ad568b 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/upgrade.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_6_0/upgrade.md
@@ -0,0 +1,25 @@
+# Measures and Care Gaps
+
+## TimeZone Header
+DQM `$care-gaps` and `$evaluate-measure` will convert parameters `periodStart` and `periodEnd`
+according to a timezone supplied by the client, not the server timezone as it was previously. Clients can leverage this
+functionality by passing in a new `Timezone` header (ex: `America/Denver`). If nothing is supplied, it will default to
+UTC.
+
+## CareGaps Operation Parameters
+### Parameters removed
+Certain `$care-gaps` operation parameters have been dropped, because they are not used or likely to be implemented
+* `topic`
+* `practitioner` is now callable via `subject` parameter
+* `organization`
+* `program`
+
+### Parameters added:
+* `measureIdentifier` now is available to resolve measure resources for evaluation
+* `nonDocument` is a new optional parameter that defaults to `false` which returns standard `document` bundle for `$care-gaps`.
+If `true`, this will return summarized subject bundle with only detectedIssue.
+
+# SDC $populate operation
+
+The `subject` parameter of the `Questionnaire/$populate` operation has been changed to expect a `Reference` as specified
+in the SDC IG.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6107-allow-sp-disabling.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6107-allow-sp-disabling.yaml
new file mode 100644
index 00000000000..b8f603a05e4
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6107-allow-sp-disabling.yaml
@@ -0,0 +1,7 @@
+---
+type: add
+issue: 6107
+title: "A new extension has been created for use on SearchParameter resources in the JPA server. This extension causes
+ a SearchParameter to be indexed, but to not be available for use in searches. This can be set when a new SP is created
+ in order to prevent it from being used before an index has been completed. See
+ [Introducing Search Parameters on Existing Data](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6224-resolve-tag-definition-in-same-thread.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6224-resolve-tag-definition-in-same-thread.yaml
new file mode 100644
index 00000000000..169b3a8b1c1
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6224-resolve-tag-definition-in-same-thread.yaml
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6224
+title: "The JPA server will no longer use a separate thread and database connection
+ to resolve tag definitions. This should improve performance in some cases, and
+ resolves compatibility issues for some environments. Thanks to Ibrahim (Trifork A/S)
+ for the pull request!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6258-improve-auth-interceptor-operation-handling.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6258-improve-auth-interceptor-operation-handling.yaml
new file mode 100644
index 00000000000..54123b6f36b
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6258-improve-auth-interceptor-operation-handling.yaml
@@ -0,0 +1,10 @@
+---
+type: fix
+issue: 6258
+title: "The AuthorizationInterceptor handling for operations has been improved
+ so that operation rules now directly test the contents of response Bundle
+ or Parameters objects returned by the operation when configured to require
+ explicit response authorization. This fixes a regression in 7.4.0 where
+ operation responses could sometimes be denied even if appropriate
+ permissions were granted to view resources in a response bundle. Thanks to
+ Gijsbert van den Brink for reporting the issue with a sample test!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6395-allow-submit-subscription-synchronously.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6395-allow-submit-subscription-synchronously.yaml
new file mode 100644
index 00000000000..a8c4d2760fe
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6395-allow-submit-subscription-synchronously.yaml
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6395
+title: "A new configuration option has been added to `SubscriptionSubmitterConfig` which
+ causes Subscription resources to be submitted to the processing queue synchronously
+ instead of asynchronously as all other resources are. This is useful for cases where
+ subscriptions need to be activated quickly. Thanks to Michal Sevcik for the contribution!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6398-add-author-search-to-npm-search.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6398-add-author-search-to-npm-search.yaml
new file mode 100644
index 00000000000..57b704a809d
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6398-add-author-search-to-npm-search.yaml
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 6398
+title: "The NPM package search module has been enhanced to support searching by
+ the package author and the package version attributes."
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6407-missing-conformity-to-ihe-balp.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6407-missing-conformity-to-ihe-balp.yaml
new file mode 100644
index 00000000000..f97d26b1547
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/6407-missing-conformity-to-ihe-balp.yaml
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 6407
+title: "Corrected IHE BALP AuditEvent generation, so that it records one Audit Event per resource owner. Thanks to Jens Villadsen (@jkiddo) for the contribution!"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/changes.yaml
new file mode 100644
index 00000000000..0331ee9c487
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/changes.yaml
@@ -0,0 +1,8 @@
+---
+- item:
+ type: "add"
+ title: "The version of a few dependencies have been bumped to more recent versions
+ (dependent HAPI modules listed in brackets):
+
+
org.hl7.fhir.core (Base): 6.3.25 -> 6.4.0
+
"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/upgrade.md
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/version.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/version.yaml
new file mode 100644
index 00000000000..3ce650fd9b3
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_8_0/version.yaml
@@ -0,0 +1,3 @@
+---
+release-date: "2025-02-17"
+codename: "TBD"
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/measures.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/measures.md
index ead8f4e806c..3f80405b863 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/measures.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/measures.md
@@ -111,8 +111,50 @@ A request using `periodStart` and `periodEnd` looks like:
```bash
GET fhir/Measure//$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31
```
+`periodStart` and `periodEnd` support Dates (YYYY, YYYY-MM, or YYYY-MM-DD) and DateTimes (YYYY-MM-DDThh:mm:ss). DateTime formats of YYYY-MM-DDThh:mm:ss+zz are no longer accepted. To pass in timezones to period queries, please see the [Headers](#headers) section below:
-`periodStart` and `periodEnd` support Dates (YYYY, YYYY-MM, or YYYY-MM-DD) and DateTimes (YYYY-MM-DDThh:mm:ss+zz:zz)
+#### Headers
+
+The behaviour of the `periodStart` and `periodEnd` parameters depends on the value of the `Timezone` header. The measure report will be queried according to the period range, as denoted by that timezone, **not the server timezone**.
+
+Accepted values for this header are documented on the [Wikipedia timezones page](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)
+
+ex: `Timezone`:`America/Denver` will set the timezone to Mountain Time.
+
+If the client omits this header, the timezone will default to UTC.
+
+Please consult the below table for examples of various combinations of start, end, and timezone, as well as the resulting queried periods:
+
+| Request timezone | Start | End | Converted Start | Converted End |
+|--------------------| ---------------------| --------------------|---------------------------|---------------------------|
+| (unset) | (unset) | (unset) | N/A | N/A |
+| (unset) | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
+| Z | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
+| UTC | 2020 | 2021 | 2020-01-01T00:00:00Z | 2021-12-31T23:59:59Z |
+| America/St_Johns | 2020 | 2021 | 2020-01-01T00:00:00-03:30 | 2021-12-31T23:59:59-03:30 |
+| America/Toronto | 2020 | 2021 | 2020-01-01T00:00:00-05:00 | 2021-12-31T23:59:59-05:00 |
+| America/Denver | 2020 | 2021 | 2020-01-01T00:00:00-07:00 | 2021-12-31T23:59:59-07:00 |
+| (unset) | 2022-02 | 2022-08 | 2022-02-01T00:00:00Z | 2022-08-31T23:59:59Z |
+| UTC | 2022-02 | 2022-08 | 2022-02-01T00:00:00Z | 2022-08-31T23:59:59Z |
+| America/St_Johns | 2022-02 | 2022-08 | 2022-02-01T00:00:00-03:30 | 2022-08-31T23:59:59-02:30 |
+| America/Toronto | 2022-02 | 2022-08 | 2022-02-01T00:00:00-05:00 | 2022-08-31T23:59:59-04:00 |
+| America/Denver | 2022-02 | 2022-08 | 2022-02-01T00:00:00-07:00 | 2022-08-31T23:59:59-06:00 |
+| (unset) | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00Z | 2024-02-26T23:59:59Z |
+| UTC | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00Z | 2024-02-26T23:59:59Z |
+| America/St_Johns | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-03:30 | 2024-02-26T23:59:59-03:30 |
+| America/Toronto | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-05:00 | 2024-02-26T23:59:59-05:00 |
+| America/Denver | 2024-02-25 | 2024-02-26 | 2024-02-25T00:00:00-07:00 | 2024-02-26T23:59:59-07:00 |
+| (unset) | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00Z | 2024-09-26T23:59:59Z |
+| UTC | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00Z | 2024-09-26T23:59:59Z |
+| America/St_Johns | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-02:30 | 2024-09-26T23:59:59-02:30 |
+| America/Toronto | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-04:00 | 2024-09-26T23:59:59-04:00 |
+| America/Denver | 2024-09-25 | 2024-09-26 | 2024-09-25T00:00:00-06:00 | 2024-09-26T23:59:59-06:00 |
+| (unset) | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
+| Z | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
+| UTC | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
+| America/St_Johns | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-02:30 | 2024-09-26T11:59:59-02:30 |
+| America/Toronto | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-04:00 | 2024-09-26T11:59:59-04:00 |
+| America/Denver | 2024-09-25T12:00:00 | 2024-09-26T12:00:00 | 2024-09-25T12:00:00-06:00 | 2024-09-26T11:59:59-06:00 |
#### Report Types
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md
index 19df9962f8b..2140179883c 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/plan_definitions.md
@@ -19,6 +19,7 @@ HAPI implements the following operations for PlanDefinitions:
* [$apply](/docs/clinical_reasoning/plan_definitions.html#apply)
* [$package](/docs/clinical_reasoning/plan_definitions.html#package)
+* [$data-requirements](/docs/clinical_reasoning/plan_definitions.html#datarequirements)
## Apply
@@ -40,32 +41,36 @@ GET http://your-server-base/fhir/PlanDefinition/opioidcds-10-patient-view/$apply
The following parameters are supported for the `PlanDefinition/$apply` and `PlanDefinition/$r5.apply` operation:
-| Parameter | Type | Description |
-|---------------------|---------------------------|-------------|
-| planDefinition | PlanDefinition | The plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. |
-| canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. |
-| url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
-| version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
-| subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. |
-| encounter | string(reference) | The encounter in context, if any. |
-| practitioner | string(reference) | The practitioner applying the plan definition. |
-| organization | string(reference) | The organization applying the plan definition. |
-| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) |
-| userLanguage | CodeableConcept | Preferred language of the person using the system |
-| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. |
-| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). |
-| settingContext | CodeableConcept | Additional detail about the setting of the request, if any |
-| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
-| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). |
-| data | Bundle | Data to be made available to the PlanDefinition evaluation. |
-| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
-| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
-| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |
+| Parameter | Type | Description |
+|-------------------------|---------------------------|-------------|
+| planDefinition | PlanDefinition | The plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter is required, or a url (and optionally version) must be supplied. |
+| canonical | canonical(PlanDefinition) | The canonical url of the plan definition to be applied. If the operation is invoked at the instance level, this parameter is not allowed; if the operation is invoked at the type level, this parameter (and optionally the version), or the planDefinition parameter must be supplied. |
+| url | uri | Canonical URL of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
+| version | string | Version of the PlanDefinition when invoked at the resource type level. This is exclusive with the planDefinition and canonical parameters. |
+| subject | string(reference) | The subject(s) that is/are the target of the plan definition to be applied. |
+| encounter | string(reference) | The encounter in context, if any. |
+| practitioner | string(reference) | The practitioner applying the plan definition. |
+| organization | string(reference) | The organization applying the plan definition. |
+| userType | CodeableConcept | The type of user initiating the request, e.g. patient, healthcare provider, or specific type of healthcare provider (physician, nurse, etc.) |
+| userLanguage | CodeableConcept | Preferred language of the person using the system |
+| userTaskContext | CodeableConcept | The task the system user is performing, e.g. laboratory results review, medication list review, etc. This information can be used to tailor decision support outputs, such as recommended information resources. |
+| setting | CodeableConcept | The current setting of the request (inpatient, outpatient, etc.). |
+| settingContext | CodeableConcept | Additional detail about the setting of the request, if any |
+| parameters | Parameters | Any input parameters defined in libraries referenced by the PlanDefinition. |
+| useServerData | boolean | Whether to use data from the server performing the evaluation. If this parameter is true (the default), then the operation will use data first from any bundles provided as parameters (through the data and prefetch parameters), second data from the server performing the operation, and third, data from the dataEndpoint parameter (if provided). If this parameter is false, the operation will use data first from the bundles provided in the data or prefetch parameters, and second from the dataEndpoint parameter (if provided). |
+| data | Bundle | Data to be made available to the PlanDefinition evaluation. |
+| prefetchData | | Data to be made available to the PlanDefinition evaluation, organized as prefetch response bundles. Each prefetchData parameter specifies either the name of the prefetchKey it is satisfying, a DataRequirement describing the prefetch, or both. |
+| prefetchData.key | string | The key of the prefetch item. This typically corresponds to the name of a parameter in a library, or the name of a prefetch item in a CDS Hooks discovery response. |
+| prefetchData.descriptor | DataRequirement | A DataRequirement describing the content of the prefetch item. |
+| prefetchData.data | Bundle | The prefetch data as a Bundle. If the prefetchData has no data part, it indicates there is no data associated with this prefetch item. |
+| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the PlanDefinition. |
+| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the PlanDefinition. |
+| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the PlanDefinition. |
## Package
-The `PlanDefinition/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for PlanDefinition will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
+The `PlanDefinition/$package` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-package.html) for PlanDefinition will generate a Bundle of resources that includes the PlanDefinition as well as any related resources which can then be shared. This implementation follows the [CRMI IG](https://hl7.org/fhir/uv/crmi/index.html) guidance for [packaging artifacts](https://hl7.org/fhir/uv/crmi/packaging.html).
### Parameters
@@ -80,6 +85,22 @@ The following parameters are supported for the `PlanDefinition/$package` operati
| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. |
+## DataRequirements
+
+The `PlanDefinition/$data-requirements` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-data-requirements.html) for PlanDefinition will generate a Library of type `module-definition` that returns the computed effective requirements of the artifact.
+
+### Parameters
+
+The following parameters are supported for the `PlanDefinition/$data-requirements` operation:
+
+| Parameter | Type | Description |
+|-----------|-----------|----------------------------------------------------------------------------------------------------------------|
+| id | string | The logical id of the canonical or artifact resource to analyze. |
+| canonical | canonical | A canonical url (optionally version specific) to a canonical resource. |
+| url | uri | A canonical or artifact reference to a canonical resource. This is exclusive with the canonical parameter. |
+| version | string | The version of the canonical or artifact resource to analyze. This is exclusive with the canonical parameter. |
+
+
## Example PlanDefinition
```json
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md
index 442322cb12e..8b2426a7d65 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/clinical_reasoning/questionnaires.md
@@ -16,7 +16,7 @@ HAPI implements the following operations for Questionnaires and QuestionnaireRes
* [$populate](/docs/clinical_reasoning/questionnaires.html#populate)
* [$extract](/docs/clinical_reasoning/questionnaires.html#extract)
* [$package](/docs/clinical_reasoning/questionnaires.html#package)
-
+* [$data-requirements](/docs/clinical_reasoning/questionnaires.html#datarequirements)
## Questionnaire
@@ -26,65 +26,69 @@ The `StructureDefinition/$questionnaire` [operation]() generates a [Questionnair
The following parameters are supported for the `StructureDefinition/$questionnaire` operation:
-| Parameter | Type | Description |
-|-----------|------|-------------|
-| profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. |
-| canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). |
-| url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
-| version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
-| supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. |
-| requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. |
-| subject | string | The subject(s) that is/are the target of the Questionnaire. |
-| parameters | Parameters | Any input parameters defined in libraries referenced by the StructureDefinition. |
-| useServerData | boolean Whether to use data from the server performing the evaluation. |
-| data | Bundle | Data to be made available during CQL evaluation. |
-| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the StructureDefinition. |
-| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. |
-| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. |
+| Parameter | Type | Description |
+|---------------------|----------------------|-------------|
+| profile | StructureDefinition | The StructureDefinition to base the Questionnaire on. Used when the operation is invoked at the 'type' level. |
+| canonical | canonical | The canonical identifier for the StructureDefinition (optionally version-specific). |
+| url | uri | Canonical URL of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
+| version | string | Version of the StructureDefinition when invoked at the resource type level. This is exclusive with the profile and canonical parameters. |
+| supportedOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "mustSupport='true'" in the StructureDefinition. |
+| requiredOnly | boolean | If true (default: false), the questionnaire will only include those elements marked as "min>0" in the StructureDefinition. |
+| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the StructureDefinition. |
+| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the StructureDefinition. |
## Populate
-The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.
+The `Questionnaire/$populate` [operation](https://hl7.org/fhir/uv/sdc/OperationDefinition-Questionnaire-populate.html) generates a [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) based on a specific [Questionnaire](https://www.hl7.org/fhir/questionnaire.html), filling in answers to questions where possible based on information provided as part of the operation or already known by the server about the subject of the Questionnaire.
+
+This implementation only allows for [Expression-based](https://hl7.org/fhir/uv/sdc/populate.html#expression-based-population) population.
+Additional parameters have been added to support CQL evaluation.
### Parameters
The following parameters are supported for the `Questionnaire/$populate` operation:
-| Parameter | Type | Description |
-|-----------|------|-------------|
-| questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. |
-| canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). |
-| url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
-| version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
-| subject | string | The subject(s) that is/are the target of the Questionnaire. |
-| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
-| useServerData | boolean | Whether to use data from the server performing the evaluation. |
-| data | Bundle | Data to be made available during CQL evaluation. |
-| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. |
-| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. |
-| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. |
-
-
+| Parameter | Type | Description |
+|---------------------|---------------|-------------|
+| questionnaire | Questionnaire | The Questionnaire to populate. Used when the operation is invoked at the 'type' level. |
+| canonical | canonical | The canonical identifier for the Questionnaire (optionally version-specific). |
+| url | uri | Canonical URL of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
+| version | string | Version of the Questionnaire when invoked at the resource type level. This is exclusive with the questionnaire and canonical parameters. |
+| subject | Reference | The resource that is to be the QuestionnaireResponse.subject. The QuestionnaireResponse instance will reference the provided subject. |
+| context | | Resources containing information to be used to help populate the QuestionnaireResponse. |
+| context.name        | string        | The name of the launchContext or root Questionnaire variable the passed content should be used as for population purposes. The name SHALL correspond to a launchContext or variable declared at the root of the Questionnaire. |
+| context.reference | Reference | The actual resource (or resources) to use as the value of the launchContext or variable. |
+| local | boolean | Whether the server should use what resources and other knowledge it has about the referenced subject when pre-populating answers to questions. |
+| launchContext | Extension | The [Questionnaire Launch Context](https://hl7.org/fhir/uv/sdc/StructureDefinition-sdc-questionnaire-launchContext.html) extension containing Resources that provide context for form processing logic (pre-population) when creating/displaying/editing a QuestionnaireResponse. |
+| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
+| useServerData | boolean | Whether to use data from the server performing the evaluation. |
+| data | Bundle | Data to be made available during CQL evaluation. |
+| dataEndpoint | Endpoint | An endpoint to use to access data referenced by retrieve operations in libraries referenced by the Questionnaire. |
+| contentEndpoint | Endpoint | An endpoint to use to access content (i.e. libraries) referenced by the Questionnaire. |
+| terminologyEndpoint | Endpoint | An endpoint to use to access terminology (i.e. valuesets, codesystems, and membership testing) referenced by the Questionnaire. |
+
## Extract
The `QuestionnaireResponse/$extract` [operation](http://hl7.org/fhir/uv/sdc/OperationDefinition-QuestionnaireResponse-extract.html) takes a completed [QuestionnaireResponse](https://www.hl7.org/fhir/questionnaireresponse.html) and converts it to a Bundle of resources by using metadata embedded in the [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) the QuestionnaireResponse is based on. The extracted resources might include Observations, MedicationStatements and other standard FHIR resources which can then be shared and manipulated. When invoking the $extract operation, care should be taken that the submitted QuestionnaireResponse is itself valid. If not, the extract operation could fail (with appropriate OperationOutcomes) or, more problematic, might succeed but provide incorrect output.
-This implementation allows for both [Observation based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.
+This implementation allows for both [Observation-based](https://hl7.org/fhir/uv/sdc/extraction.html#observation-based-extraction) and [Definition-based](https://hl7.org/fhir/uv/sdc/extraction.html#definition-based-extraction) extraction.
### Parameters
The following parameters are supported for the `QuestionnaireResponse/$extract` operation:
-| Parameter | Type | Description |
-|-----------|------|-------------|
+| Parameter | Type | Description |
+|------------------------|-----------------------|-------------|
| questionnaire-response | QuestionnaireResponse | The QuestionnaireResponse to extract data from. Used when the operation is invoked at the 'type' level. |
-| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
-| data | Bundle | Data to be made available during CQL evaluation. |
+| questionnaire | Questionnaire | The Questionnaire the QuestionnaireResponse is answering. Used when the server does not have access to the Questionnaire. |
+| parameters | Parameters | Any input parameters defined in libraries referenced by the Questionnaire. |
+| useServerData | boolean | Whether to use data from the server performing the evaluation. |
+| data | Bundle | Data to be made available during CQL evaluation. |
## Package
-The `Questionnaire/$package` [operation](https://build.fhir.org/ig/HL7/crmi-ig/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/index.html) guidance for [packaging artifacts](https://build.fhir.org/ig/HL7/crmi-ig/branches/master/packaging.html).
+The `Questionnaire/$package` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-package.html) for [Questionnaire](https://www.hl7.org/fhir/questionnaire.html) will generate a Bundle of resources that includes the Questionnaire as well as any related Library or ValueSet resources which can then be shared. This implementation follows the [CRMI IG](https://hl7.org/fhir/uv/crmi/index.html) guidance for [packaging artifacts](https://hl7.org/fhir/uv/crmi/packaging.html).
### Parameters
@@ -99,6 +103,22 @@ The following parameters are supported for the `Questionnaire/$package` operatio
| usePut | boolean | Determines the type of method returned in the Bundle Entries: POST if False (the default), PUT if True. |
+## DataRequirements
+
+The `Questionnaire/$data-requirements` [operation](https://hl7.org/fhir/uv/crmi/OperationDefinition-crmi-data-requirements.html) for Questionnaire will generate a Library of type `module-definition` that returns the computed effective requirements of the artifact.
+
+### Parameters
+
+The following parameters are supported for the `Questionnaire/$data-requirements` operation:
+
+| Parameter | Type | Description |
+|-----------|-----------|----------------------------------------------------------------------------------------------------------------|
+| id | string | The logical id of the canonical or artifact resource to analyze. |
+| canonical | canonical | A canonical url (optionally version specific) to a canonical resource. |
+| url | uri | A canonical or artifact reference to a canonical resource. This is exclusive with the canonical parameter. |
+| version | string | The version of the canonical or artifact resource to analyze. This is exclusive with the canonical parameter. |
+
+
## Example Questionnaire
```json
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/contributing/hacking_guide.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/contributing/hacking_guide.md
index f1ad41ae49e..3e22b27e133 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/contributing/hacking_guide.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/contributing/hacking_guide.md
@@ -51,6 +51,14 @@ HAPI is built primary using [Apache Maven](http://maven.apache.org/). Even if yo
[INFO] Finished at: 2016-02-27T15:05:35+00:00
```
+# Rebuilding the Database Schema
+
+The database schema is built as part of your Maven build, but in case you need to rebuild it later, you can use the command:
+
+ ```bash
+ mvn hapi-tinder:generate-ddl
+ ```
+
# Troubleshooting
If the build fails to execute successfully, try the following:
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
index d4f2eb07628..7b59f6d19ea 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/files.properties
@@ -94,6 +94,7 @@ page.clinical_reasoning.overview=Clinical Reasoning Overview
page.clinical_reasoning.cql=CQL
page.clinical_reasoning.caregaps=Care Gaps
page.clinical_reasoning.measures=Measures
+page.clinical_reasoning.activity_definitions=ActivityDefinitions
page.clinical_reasoning.plan_definitions=PlanDefinitions
page.clinical_reasoning.questionnaires=Questionnaires
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/search.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/search.md
index e760b4a3dab..ecafafea17b 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/search.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/search.md
@@ -238,3 +238,7 @@ In order to improve sorting performance when chained sorts are needed, an [Uplif
# _include and _revinclude order
By default, all _revincludes will be performed first and then all _includes are performed afterwards. However, if any _revinclude parameters are modified with :iterate (or :recurse for earlier versions of FHIR) then all _include parameters will be evaluated first.
+
+# Custom Search Parameters
+
+HAPI FHIR has the ability to index and use custom search parameters, including parameters which enforce uniqueness, parameters which index combinations of parameters, and parameters which are indexed but not used for searches until they are ready. See [Custom Search Parameters](https://smilecdr.com/docs/fhir_standard/fhir_search_custom_search_parameters.html) for more information.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/introduction.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/introduction.md
index 1c3bb485c21..03468909d7f 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/introduction.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa_batch/introduction.md
@@ -70,3 +70,7 @@ If a Job Definition is set to having Gated Execution, then all work chunks for a
### Job Instance Completion
A Batch Job Maintenance Service runs every minute to monitor the status of all Job Instances and the Job Instance is transitioned to either `COMPLETED`, `ERRORED` or `FAILED` according to the status of all outstanding work chunks for that job instance. If the job instance is still `IN_PROGRESS` this maintenance service also estimates the time remaining to complete the job.
+
+## Logging
+
+The job instance ID and work chunk ID are both available through the logback MDC and can be accessed using the `%X` specifier in a `logback.xml` file. See [Logging](/docs/appendix/logging.html#logging) for more details about logging in HAPI.
diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_plain/rest_operations_search.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_plain/rest_operations_search.md
index 291ef86af96..ccab261e903 100644
--- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_plain/rest_operations_search.md
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_plain/rest_operations_search.md
@@ -287,7 +287,7 @@ Dates are a special case, since it is a fairly common scenario to want to match
# Resource Includes (_include)
-FHIR allows clients to request that specific linked resources be included as contained resources, which means that they will be "embedded" in a special container called "contained" within the parent resource.
+Using the `_include` FHIR parameter, clients can request that specific linked resources be embedded directly within search results. These included resources will have a search.mode of "include".
HAPI allows you to add a parameter for accepting includes if you wish to support them for specific search methods.
diff --git a/hapi-fhir-jacoco/pom.xml b/hapi-fhir-jacoco/pom.xml
index c15128beca5..8b35bcb92c6 100644
--- a/hapi-fhir-jacoco/pom.xml
+++ b/hapi-fhir-jacoco/pom.xml
@@ -11,7 +11,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jaxrsserver-base/pom.xml b/hapi-fhir-jaxrsserver-base/pom.xml
index 4346a3fd25d..822bce076e2 100644
--- a/hapi-fhir-jaxrsserver-base/pom.xml
+++ b/hapi-fhir-jaxrsserver-base/pom.xml
@@ -4,7 +4,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java
index d985fa22243..1b08f76c0cb 100644
--- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java
+++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu2Test.java
@@ -1474,7 +1474,9 @@ public class GenericJaxRsClientDstu2Test {
.execute();
- assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", CAPTURE_SERVLET.ourRequestUri);
+ assertThat(CAPTURE_SERVLET.ourRequestUri).isIn(
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");
// assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@@ -1511,7 +1513,10 @@ public class GenericJaxRsClientDstu2Test {
assertThat(CAPTURE_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
- assertThat(CAPTURE_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+ assertThat(CAPTURE_SERVLET.ourRequestUri).satisfiesAnyOf(
+ s -> assertThat(s).contains("_elements=identifier%2Cname"),
+ s -> assertThat(s).contains("_elements=name%2Cidentifier")
+ );
// assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
diff --git a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java
index c376d692b2f..9b410455607 100644
--- a/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java
+++ b/hapi-fhir-jaxrsserver-base/src/test/java/ca/uhn/fhir/jaxrs/client/GenericJaxRsClientDstu3Test.java
@@ -1546,7 +1546,9 @@ public class GenericJaxRsClientDstu3Test {
.execute();
//@formatter:on
- assertEquals(ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname", MY_SERVLET.ourRequestUri);
+ assertThat(MY_SERVLET.ourRequestUri).isIn(
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=identifier%2Cname",
+ ourServer.getBaseUrl() + "/fhir/Patient/_search?_elements=name%2Cidentifier");
// assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
@@ -1583,7 +1585,10 @@ public class GenericJaxRsClientDstu3Test {
//@formatter:on
assertThat(MY_SERVLET.ourRequestUri).contains(ourServer.getBaseUrl() + "/fhir/Patient/_search?");
- assertThat(MY_SERVLET.ourRequestUri).contains("_elements=identifier%2Cname");
+ assertThat(MY_SERVLET.ourRequestUri).satisfiesAnyOf(
+ s -> assertThat(s).contains("_elements=identifier%2Cname"),
+ s -> assertThat(s).contains("_elements=name%2Cidentifier")
+ );
// assertThat(MY_SERVLET.ourRequestUri,
// either(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=name%2Cidentifier")).or(equalTo(ourServer.getBaseUrl() + "/fhir/Patient?name=james&_elements=identifier%2Cname")));
diff --git a/hapi-fhir-jpa/pom.xml b/hapi-fhir-jpa/pom.xml
index c01367fde30..71f13b15a60 100644
--- a/hapi-fhir-jpa/pom.xml
+++ b/hapi-fhir-jpa/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml
index 4a7722fa96d..7280901f219 100644
--- a/hapi-fhir-jpaserver-base/pom.xml
+++ b/hapi-fhir-jpaserver-base/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhirhapi-deployable-pom
- 7.5.1-SNAPSHOT
+ 7.7.3-SNAPSHOT../hapi-deployable-pom/pom.xml
@@ -414,7 +414,6 @@
ca.uhn.hapi.fhirhapi-tinder-plugin
- ${project.version}build_dstu2
@@ -525,7 +524,6 @@
ca.uhn.hapi.fhirhapi-tinder-plugin
- ${project.version}
@@ -534,6 +532,7 @@
+ falseca.uhn.fhir.jpa.entityca.uhn.fhir.jpa.model.entity
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java
index 5ea89d2adb4..c0826ad4dc0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch2/JpaJobPersistenceImpl.java
@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.batch2;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.api.JobOperationResultJson;
+import ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO;
+import ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO;
import ca.uhn.fhir.batch2.model.FetchJobInstancesRequest;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.StatusEnum;
@@ -258,6 +260,22 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
.execute(() -> myJobInstanceRepository.findById(theInstanceId).map(this::toInstance));
}
+ @Nonnull
+ @Override
+ public List fetchWorkChunkStatusForInstance(String theInstanceId) {
+ return myTransactionService
+ .withSystemRequestOnDefaultPartition()
+ .execute(() -> myWorkChunkRepository.fetchWorkChunkStatusForInstance(theInstanceId));
+ }
+
+ @Nonnull
+ @Override
+ public BatchInstanceStatusDTO fetchBatchInstanceStatus(String theInstanceId) {
+ return myTransactionService
+ .withSystemRequestOnDefaultPartition()
+ .execute(() -> myJobInstanceRepository.fetchBatchInstanceStatus(theInstanceId));
+ }
+
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List fetchInstances(FetchJobInstancesRequest theRequest, int thePage, int theBatchSize) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
index 440ed7c27af..fca9611aed2 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/JpaBulkExportProcessor.java
@@ -625,7 +625,8 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor {
resourceToCheck = "Patient";
activeSearchParamName = "organization";
}
- return mySearchParamRegistry.getActiveSearchParam(resourceToCheck, activeSearchParamName);
+ return mySearchParamRegistry.getActiveSearchParam(
+ resourceToCheck, activeSearchParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
}
/**
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
index fb78410c64a..9a066b9dddb 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/JpaConfig.java
@@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
+import ca.uhn.fhir.jpa.dao.CacheTagDefinitionDao;
import ca.uhn.fhir.jpa.dao.DaoSearchParamProvider;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
@@ -56,6 +57,7 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
import ca.uhn.fhir.jpa.dao.data.IResourceModifiedDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
+import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeEverythingService;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeOperation;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
@@ -377,17 +379,17 @@ public class JpaConfig {
@Bean
public TaskScheduler taskScheduler() {
- ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
- retVal.setConcurrentExecutor(scheduledExecutorService().getObject());
- retVal.setScheduledExecutor(scheduledExecutorService().getObject());
+ ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
+ scheduledExecutorService().getObject(),
+ scheduledExecutorService().getObject());
return retVal;
}
@Bean(name = TASK_EXECUTOR_NAME)
public AsyncTaskExecutor taskExecutor() {
- ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
- retVal.setConcurrentExecutor(scheduledExecutorService().getObject());
- retVal.setScheduledExecutor(scheduledExecutorService().getObject());
+ ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler(
+ scheduledExecutorService().getObject(),
+ scheduledExecutorService().getObject());
return retVal;
}
@@ -893,4 +895,10 @@ public class JpaConfig {
FhirContext theFhirContext, HibernatePropertiesProvider theHibernatePropertiesProvider) {
return new ResourceHistoryCalculator(theFhirContext, theHibernatePropertiesProvider.isOracleDialect());
}
+
+ @Bean
+ public CacheTagDefinitionDao tagDefinitionDao(
+ ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
+ return new CacheTagDefinitionDao(tagDefinitionDao, memoryCacheService);
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
index e432143b291..4294a18918d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java
@@ -75,7 +75,6 @@ import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.jpa.util.AddRemoveCount;
-import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
@@ -89,7 +88,6 @@ import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
-import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -107,14 +105,8 @@ import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.annotation.PostConstruct;
import jakarta.persistence.EntityManager;
-import jakarta.persistence.NoResultException;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
-import jakarta.persistence.TypedQuery;
-import jakarta.persistence.criteria.CriteriaBuilder;
-import jakarta.persistence.criteria.CriteriaQuery;
-import jakarta.persistence.criteria.Predicate;
-import jakarta.persistence.criteria.Root;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
@@ -136,19 +128,11 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Repository;
-import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.TransactionDefinition;
-import org.springframework.transaction.TransactionStatus;
-import org.springframework.transaction.support.TransactionCallback;
-import org.springframework.transaction.support.TransactionSynchronization;
-import org.springframework.transaction.support.TransactionSynchronizationManager;
-import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
@@ -158,7 +142,6 @@ import java.util.stream.Collectors;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;
-import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import static org.apache.commons.collections4.CollectionUtils.isEqualCollection;
import static org.apache.commons.lang3.StringUtils.isBlank;
@@ -182,8 +165,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
public static final long INDEX_STATUS_INDEXED = 1L;
public static final long INDEX_STATUS_INDEXING_FAILED = 2L;
public static final String NS_JPA_PROFILE = "https://github.com/hapifhir/hapi-fhir/ns/jpa/profile";
- // total attempts to do a tag transaction
- private static final int TOTAL_TAG_READ_ATTEMPTS = 10;
private static final Logger ourLog = LoggerFactory.getLogger(BaseHapiFhirDao.class);
private static boolean ourValidationDisabledForUnitTest;
private static boolean ourDisableIncrementOnUpdateForUnitTest = false;
@@ -248,17 +229,14 @@ public abstract class BaseHapiFhirDao extends BaseStora
@Autowired
private IPartitionLookupSvc myPartitionLookupSvc;
- @Autowired
- private MemoryCacheService myMemoryCacheService;
-
@Autowired(required = false)
private IFulltextSearchSvc myFulltextSearchSvc;
@Autowired
- private PlatformTransactionManager myTransactionManager;
+ protected ResourceHistoryCalculator myResourceHistoryCalculator;
@Autowired
- protected ResourceHistoryCalculator myResourceHistoryCalculator;
+ protected CacheTagDefinitionDao cacheTagDefinitionDao;
protected final CodingSpy myCodingSpy = new CodingSpy();
@@ -307,7 +285,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
TagList tagList = ResourceMetadataKeyEnum.TAG_LIST.get(theResource);
if (tagList != null) {
for (Tag next : tagList) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
TagTypeEnum.TAG,
next.getScheme(),
@@ -326,7 +304,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
List securityLabels = ResourceMetadataKeyEnum.SECURITY_LABELS.get(theResource);
if (securityLabels != null) {
for (BaseCodingDt next : securityLabels) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
TagTypeEnum.SECURITY_LABEL,
next.getSystemElement().getValue(),
@@ -345,7 +323,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
List profiles = ResourceMetadataKeyEnum.PROFILES.get(theResource);
if (profiles != null) {
for (IIdType next : profiles) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
if (def != null) {
ResourceTag tag = theEntity.addTag(def);
@@ -364,7 +342,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
List extends IBaseCoding> tagList = theResource.getMeta().getTag();
if (tagList != null) {
for (IBaseCoding next : tagList) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
TagTypeEnum.TAG,
next.getSystem(),
@@ -383,7 +361,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
List extends IBaseCoding> securityLabels = theResource.getMeta().getSecurity();
if (securityLabels != null) {
for (IBaseCoding next : securityLabels) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
TagTypeEnum.SECURITY_LABEL,
next.getSystem(),
@@ -402,7 +380,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
List extends IPrimitiveType> profiles = theResource.getMeta().getProfile();
if (profiles != null) {
for (IPrimitiveType next : profiles) {
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, next.getValue(), null, null, null);
if (def != null) {
ResourceTag tag = theEntity.addTag(def);
@@ -422,7 +400,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
if (!def.isStandardType()) {
String profile = def.getResourceProfile("");
if (isNotBlank(profile)) {
- TagDefinition profileDef = getTagOrNull(
+ TagDefinition profileDef = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails, TagTypeEnum.PROFILE, NS_JPA_PROFILE, profile, null, null, null);
ResourceTag tag = theEntity.addTag(profileDef);
@@ -456,164 +434,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
myContext = theContext;
}
- /**
- * null will only be returned if the scheme and tag are both blank
- */
- protected TagDefinition getTagOrNull(
- TransactionDetails theTransactionDetails,
- TagTypeEnum theTagType,
- String theScheme,
- String theTerm,
- String theLabel,
- String theVersion,
- Boolean theUserSelected) {
- if (isBlank(theScheme) && isBlank(theTerm) && isBlank(theLabel)) {
- return null;
- }
-
- MemoryCacheService.TagDefinitionCacheKey key =
- toTagDefinitionMemoryCacheKey(theTagType, theScheme, theTerm, theVersion, theUserSelected);
-
- TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
- if (retVal == null) {
- HashMap resolvedTagDefinitions =
- theTransactionDetails.getOrCreateUserData(
- HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new);
-
- retVal = resolvedTagDefinitions.get(key);
-
- if (retVal == null) {
- // actual DB hit(s) happen here
- retVal = getOrCreateTag(theTagType, theScheme, theTerm, theLabel, theVersion, theUserSelected);
-
- TransactionSynchronization sync = new AddTagDefinitionToCacheAfterCommitSynchronization(key, retVal);
- TransactionSynchronizationManager.registerSynchronization(sync);
-
- resolvedTagDefinitions.put(key, retVal);
- }
- }
-
- return retVal;
- }
-
- /**
- * Gets the tag defined by the fed in values, or saves it if it does not
- * exist.
- *
- * Can also throw an InternalErrorException if something bad happens.
- */
- private TagDefinition getOrCreateTag(
- TagTypeEnum theTagType,
- String theScheme,
- String theTerm,
- String theLabel,
- String theVersion,
- Boolean theUserSelected) {
-
- TypedQuery q = buildTagQuery(theTagType, theScheme, theTerm, theVersion, theUserSelected);
- q.setMaxResults(1);
-
- TransactionTemplate template = new TransactionTemplate(myTransactionManager);
- template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
-
- // this transaction will attempt to get or create the tag,
- // repeating (on any failure) 10 times.
- // if it fails more than this, we will throw exceptions
- TagDefinition retVal;
- int count = 0;
- HashSet throwables = new HashSet<>();
- do {
- try {
- retVal = template.execute(new TransactionCallback() {
-
- // do the actual DB call(s) to read and/or write the values
- private TagDefinition readOrCreate() {
- TagDefinition val;
- try {
- val = q.getSingleResult();
- } catch (NoResultException e) {
- val = new TagDefinition(theTagType, theScheme, theTerm, theLabel);
- val.setVersion(theVersion);
- val.setUserSelected(theUserSelected);
- myEntityManager.persist(val);
- }
- return val;
- }
-
- @Override
- public TagDefinition doInTransaction(TransactionStatus status) {
- TagDefinition tag = null;
-
- try {
- tag = readOrCreate();
- } catch (Exception ex) {
- // log any exceptions - just in case
- // they may be signs of things to come...
- ourLog.warn(
- "Tag read/write failed: "
- + ex.getMessage() + ". "
- + "This is not a failure on its own, "
- + "but could be useful information in the result of an actual failure.",
- ex);
- throwables.add(ex);
- }
-
- return tag;
- }
- });
- } catch (Exception ex) {
- // transaction template can fail if connections to db are exhausted and/or timeout
- ourLog.warn(
- "Transaction failed with: {}. Transaction will rollback and be reattempted.", ex.getMessage());
- retVal = null;
- }
- count++;
- } while (retVal == null && count < TOTAL_TAG_READ_ATTEMPTS);
-
- if (retVal == null) {
- // if tag is still null,
- // something bad must be happening
- // - throw
- String msg = throwables.stream().map(Throwable::getMessage).collect(Collectors.joining(", "));
- throw new InternalErrorException(Msg.code(2023)
- + "Tag get/create failed after "
- + TOTAL_TAG_READ_ATTEMPTS
- + " attempts with error(s): "
- + msg);
- }
-
- return retVal;
- }
-
- private TypedQuery buildTagQuery(
- TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
- CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
- CriteriaQuery cq = builder.createQuery(TagDefinition.class);
- Root from = cq.from(TagDefinition.class);
-
- List predicates = new ArrayList<>();
- predicates.add(builder.and(
- builder.equal(from.get("myTagType"), theTagType), builder.equal(from.get("myCode"), theTerm)));
-
- predicates.add(
- isBlank(theScheme)
- ? builder.isNull(from.get("mySystem"))
- : builder.equal(from.get("mySystem"), theScheme));
-
- predicates.add(
- isBlank(theVersion)
- ? builder.isNull(from.get("myVersion"))
- : builder.equal(from.get("myVersion"), theVersion));
-
- predicates.add(
- isNull(theUserSelected)
- ? builder.isNull(from.get("myUserSelected"))
- : builder.equal(from.get("myUserSelected"), theUserSelected));
-
- cq.where(predicates.toArray(new Predicate[0]));
- return myEntityManager.createQuery(cq);
- }
-
void incrementId(T theResource, ResourceTable theSavedEntity, IIdType theResourceId) {
if (theResourceId == null || theResourceId.getVersionIdPart() == null) {
theSavedEntity.initializeVersion();
@@ -933,7 +753,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
@Override
@CoverageIgnore
public BaseHasResource readEntity(IIdType theValueId, RequestDetails theRequest) {
- throw new NotImplementedException(Msg.code(927) + "");
+ throw new NotImplementedException(Msg.code(927));
}
/**
@@ -1839,9 +1659,7 @@ public abstract class BaseHapiFhirDao extends BaseStora
}
@PostConstruct
- public void start() {
- // nothing yet
- }
+ public void start() {}
@VisibleForTesting
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
@@ -1880,30 +1698,6 @@ public abstract class BaseHapiFhirDao extends BaseStora
myJpaStorageResourceParser = theJpaStorageResourceParser;
}
- private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
-
- private final TagDefinition myTagDefinition;
- private final MemoryCacheService.TagDefinitionCacheKey myKey;
-
- public AddTagDefinitionToCacheAfterCommitSynchronization(
- MemoryCacheService.TagDefinitionCacheKey theKey, TagDefinition theTagDefinition) {
- myTagDefinition = theTagDefinition;
- myKey = theKey;
- }
-
- @Override
- public void afterCommit() {
- myMemoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, myKey, myTagDefinition);
- }
- }
-
- @Nonnull
- public static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
- TagTypeEnum theTagType, String theScheme, String theTerm, String theVersion, Boolean theUserSelected) {
- return new MemoryCacheService.TagDefinitionCacheKey(
- theTagType, theScheme, theTerm, theVersion, theUserSelected);
- }
-
@SuppressWarnings("unchecked")
public static String parseContentTextIntoWords(FhirContext theContext, IBaseResource theResource) {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
index fbd583d0090..f1d52bbd30b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
@@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.batch2.api.IJobCoordinator;
import ca.uhn.fhir.batch2.api.IJobPartitionProvider;
-import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirVersionEnum;
@@ -158,6 +157,7 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static ca.uhn.fhir.batch2.jobs.reindex.ReindexUtils.JOB_REINDEX;
import static java.util.Objects.isNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -1043,7 +1043,7 @@ public abstract class BaseHapiFhirResourceDao extends B
if (!entityHasTag) {
theEntity.setHasTags(true);
- TagDefinition def = getTagOrNull(
+ TagDefinition def = cacheTagDefinitionDao.getTagOrNull(
theTransactionDetails,
nextDef.getTagType(),
nextDef.getSystem(),
@@ -1315,7 +1315,7 @@ public abstract class BaseHapiFhirResourceDao extends B
myJobPartitionProvider.getPartitionedUrls(theRequestDetails, urls).forEach(params::addPartitionedUrl);
JobInstanceStartRequest request = new JobInstanceStartRequest();
- request.setJobDefinitionId(ReindexAppCtx.JOB_REINDEX);
+ request.setJobDefinitionId(JOB_REINDEX);
request.setParameters(params);
myJobCoordinator.startInstance(theRequestDetails, request);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CacheTagDefinitionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CacheTagDefinitionDao.java
new file mode 100644
index 00000000000..342a1e3402b
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/CacheTagDefinitionDao.java
@@ -0,0 +1,132 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2024 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
+import ca.uhn.fhir.jpa.model.entity.TagDefinition;
+import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
+import ca.uhn.fhir.jpa.util.MemoryCacheService;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
+import jakarta.annotation.Nonnull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.data.domain.Pageable;
+import org.springframework.stereotype.Repository;
+import org.springframework.transaction.support.TransactionSynchronization;
+import org.springframework.transaction.support.TransactionSynchronizationManager;
+
+import java.util.HashMap;
+import java.util.List;
+
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+@Repository
+public class CacheTagDefinitionDao {
+ private static final Logger ourLog = LoggerFactory.getLogger(CacheTagDefinitionDao.class);
+
+ private final ITagDefinitionDao tagDefinitionDao;
+ private final MemoryCacheService memoryCacheService;
+
+ public CacheTagDefinitionDao(ITagDefinitionDao tagDefinitionDao, MemoryCacheService memoryCacheService) {
+ this.tagDefinitionDao = tagDefinitionDao;
+ this.memoryCacheService = memoryCacheService;
+ }
+
+ /**
+ * Returns a TagDefinition or null if the scheme, term, and label are all blank.
+ */
+ protected TagDefinition getTagOrNull(
+ TransactionDetails transactionDetails,
+ TagTypeEnum tagType,
+ String scheme,
+ String term,
+ String label,
+ String version,
+ Boolean userSelected) {
+
+ if (isBlank(scheme) && isBlank(term) && isBlank(label)) {
+ return null;
+ }
+
+ MemoryCacheService.TagDefinitionCacheKey key =
+ toTagDefinitionMemoryCacheKey(tagType, scheme, term, version, userSelected);
+ TagDefinition tagDefinition = memoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
+
+ if (tagDefinition == null) {
+ HashMap resolvedTagDefinitions =
+ transactionDetails.getOrCreateUserData("resolvedTagDefinitions", HashMap::new);
+
+ tagDefinition = resolvedTagDefinitions.get(key);
+
+ if (tagDefinition == null) {
+ tagDefinition = getOrCreateTag(tagType, scheme, term, label, version, userSelected);
+
+ TransactionSynchronization sync =
+ new AddTagDefinitionToCacheAfterCommitSynchronization(key, tagDefinition);
+ TransactionSynchronizationManager.registerSynchronization(sync);
+
+ resolvedTagDefinitions.put(key, tagDefinition);
+ }
+ }
+
+ return tagDefinition;
+ }
+
+ /**
+ * Gets or creates a TagDefinition entity.
+ */
+ private TagDefinition getOrCreateTag(
+ TagTypeEnum tagType, String scheme, String term, String label, String version, Boolean userSelected) {
+ List result = tagDefinitionDao.findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
+ tagType, scheme, term, version, userSelected, Pageable.ofSize(1));
+
+ if (!result.isEmpty()) {
+ return result.get(0);
+ } else {
+ // Create a new TagDefinition if no result is found
+ TagDefinition newTag = new TagDefinition(tagType, scheme, term, label);
+ newTag.setVersion(version);
+ newTag.setUserSelected(userSelected);
+ return tagDefinitionDao.save(newTag);
+ }
+ }
+
+ @Nonnull
+ private static MemoryCacheService.TagDefinitionCacheKey toTagDefinitionMemoryCacheKey(
+ TagTypeEnum tagType, String scheme, String term, String version, Boolean userSelected) {
+ return new MemoryCacheService.TagDefinitionCacheKey(tagType, scheme, term, version, userSelected);
+ }
+
+ private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {
+ private final TagDefinition tagDefinition;
+ private final MemoryCacheService.TagDefinitionCacheKey key;
+
+ public AddTagDefinitionToCacheAfterCommitSynchronization(
+ MemoryCacheService.TagDefinitionCacheKey key, TagDefinition tagDefinition) {
+ this.tagDefinition = tagDefinition;
+ this.key = key;
+ }
+
+ @Override
+ public void afterCommit() {
+ memoryCacheService.put(MemoryCacheService.CacheEnum.TAG_DEFINITION, key, tagDefinition);
+ }
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
index 5c45b2ca875..0bccd1b6c67 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java
@@ -137,7 +137,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
public ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
String resourceType = myFhirContext.getResourceType(theResource);
- ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
+ ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
+ resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
return extractor.extract(theResource, theNewParams);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java
index 5e8f6b053bb..270238da3fb 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaResourceDaoCodeSystem.java
@@ -27,7 +27,11 @@ import ca.uhn.fhir.context.support.IValidationSupport.CodeValidationResult;
import ca.uhn.fhir.context.support.LookupCodeRequest;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.i18n.Msg;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
+import ca.uhn.fhir.jpa.api.dao.ReindexOutcome;
+import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
+import ca.uhn.fhir.jpa.api.model.ReindexJobStatus;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -176,6 +180,47 @@ public class JpaResourceDaoCodeSystem extends BaseHapiF
myTermDeferredStorageSvc.deleteCodeSystemForResource(theEntityToDelete);
}
+ /**
+ * If there are more code systems to process
+ * than {@link JpaStorageSettings#getDeferIndexingForCodesystemsOfSize()},
+ * then these codes will have their processing deferred (for a later time).
+ *
+ * This can result in future reindex steps *skipping* these code systems (if
+ * they're still deferred), which leads to incorrect expansions.
+ *
+ * So we override the reindex method for CodeSystems specifically to
+ * force reindex batch jobs to wait until all code systems are processed before
+ * moving on.
+ */
+ @SuppressWarnings("rawtypes")
+ @Override
+ public ReindexOutcome reindex(
+ IResourcePersistentId thePid,
+ ReindexParameters theReindexParameters,
+ RequestDetails theRequest,
+ TransactionDetails theTransactionDetails) {
+ ReindexOutcome outcome = super.reindex(thePid, theReindexParameters, theRequest, theTransactionDetails);
+
+ // No warnings means the base reindex succeeded; flag pending work so the batch
+ // job keeps polling getReindexJobStatus() until deferred term storage drains.
+ if (outcome.getWarnings().isEmpty()) {
+ outcome.setHasPendingWork(true);
+ }
+ return outcome;
+ }
+
+ /**
+ * Reports whether deferred terminology work is still outstanding for reindex
+ * jobs. While the deferred-storage queue is non-empty this also triggers a
+ * save pass so the queue keeps draining rather than waiting for the next
+ * scheduled run.
+ */
+ @Override
+ public ReindexJobStatus getReindexJobStatus() {
+ boolean isQueueEmpty = myTermDeferredStorageSvc.isStorageQueueEmpty(true);
+
+ ReindexJobStatus status = new ReindexJobStatus();
+ status.setHasReindexWorkPending(!isQueueEmpty);
+ if (status.isHasReindexWorkPending()) {
+ // force a run
+ myTermDeferredStorageSvc.saveDeferred();
+ }
+
+ return status;
+ }
+
@Override
public ResourceTable updateEntity(
RequestDetails theRequest,
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java
index 023fd93af64..b027702cf6e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2JobInstanceRepository.java
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.data;
+import ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO;
import ca.uhn.fhir.batch2.model.StatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import org.springframework.data.domain.Pageable;
@@ -91,4 +92,8 @@ public interface IBatch2JobInstanceRepository
@Query("SELECT e FROM Batch2JobInstanceEntity e WHERE e.myDefinitionId = :jobDefinitionId")
List findInstancesByJobDefinitionId(
@Param("jobDefinitionId") String theJobDefinitionId, Pageable thePageRequest);
+
+ @Query(
+ "SELECT new ca.uhn.fhir.batch2.model.BatchInstanceStatusDTO(e.myId, e.myStatus, e.myStartTime, e.myEndTime) FROM Batch2JobInstanceEntity e WHERE e.myId = :id")
+ BatchInstanceStatusDTO fetchBatchInstanceStatus(@Param("id") String theInstanceId);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java
index 52319b8efe1..e9611614e45 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/IBatch2WorkChunkRepository.java
@@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.data;
+import ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO;
import ca.uhn.fhir.batch2.model.WorkChunkStatusEnum;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import org.springframework.data.domain.Pageable;
@@ -147,4 +148,8 @@ public interface IBatch2WorkChunkRepository
@Param("instanceId") String theInstanceId,
@Param("stepId") String theStepId,
@Param("status") WorkChunkStatusEnum theStatus);
+
+ @Query(
+ "SELECT new ca.uhn.fhir.batch2.model.BatchWorkChunkStatusDTO(e.myTargetStepId, e.myStatus, min(e.myStartTime), max(e.myEndTime), avg(e.myEndTime - e.myStartTime), count(*)) FROM Batch2WorkChunkEntity e WHERE e.myInstanceId=:instanceId GROUP BY e.myTargetStepId, e.myStatus")
+ List fetchWorkChunkStatusForInstance(@Param("instanceId") String theInstanceId);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITagDefinitionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITagDefinitionDao.java
index b3ecacfdda3..efbce1573e5 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITagDefinitionDao.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITagDefinitionDao.java
@@ -20,8 +20,25 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
+import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
+import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.List;
public interface ITagDefinitionDao extends JpaRepository, IHapiFhirJpaRepository {
- // nothing
+ /**
+ * Finds TagDefinitions matching the given attributes.
+ * <p>
+ * Null or empty {@code scheme}/{@code version} and a null {@code userSelected}
+ * act as wildcards (their predicate collapses to true), so a blank scheme
+ * matches tags from ANY system — NOTE(review): confirm callers intend
+ * "wildcard" here rather than "tag with no system".
+ */
+ @Query("SELECT t FROM TagDefinition t WHERE " + "t.myTagType = :tagType AND "
+ + "( :scheme IS NULL OR :scheme = '' OR t.mySystem = :scheme ) AND "
+ + "t.myCode = :term AND "
+ + "( :version IS NULL OR :version = '' OR t.myVersion = :version ) AND "
+ + "( :userSelected IS NULL OR t.myUserSelected = :userSelected )")
+ List findByTagTypeAndSchemeAndTermAndVersionAndUserSelected(
+ @Param("tagType") TagTypeEnum tagType,
+ @Param("scheme") String scheme,
+ @Param("term") String term,
+ @Param("version") String version,
+ @Param("userSelected") Boolean userSelected,
+ Pageable pageable);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
index dc5cc4d70b2..79029f95585 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/ExtendedHSearchSearchBuilder.java
@@ -92,7 +92,8 @@ public class ExtendedHSearchSearchBuilder {
String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
boolean canUseHibernate = false;
- ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(theResourceType);
+ ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(
+ theResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String paramName : myParams.keySet()) {
// is this parameter supported?
if (illegalForHibernateSearch(paramName, resourceActiveSearchParams)) {
@@ -218,7 +219,8 @@ public class ExtendedHSearchSearchBuilder {
// copy the keys to avoid concurrent modification error
ArrayList paramNames = compileParamNames(searchParameterMap);
- ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(resourceType);
+ ResourceSearchParams activeSearchParams = searchParamRegistry.getActiveSearchParams(
+ resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String nextParam : paramNames) {
if (illegalForHibernateSearch(nextParam, activeSearchParams)) {
// ignore magic params handled in JPA
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java
index e3ba26a504b..cf00f237321 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/search/HSearchSortHelperImpl.java
@@ -151,7 +151,8 @@ public class HSearchSortHelperImpl implements IHSearchSortHelper {
*/
@VisibleForTesting
Optional getParamType(String theResourceTypeName, String theParamName) {
- ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theResourceTypeName);
+ ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
+ theResourceTypeName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
RuntimeSearchParam searchParam = activeSearchParams.get(theParamName);
if (searchParam == null) {
return Optional.empty();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java
index 965914cf85a..1d08ddbc41d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/graphql/GraphQLProviderWithIntrospection.java
@@ -181,7 +181,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
for (String nextResourceType : theResourceTypes) {
StructureDefinition sd = fetchStructureDefinition(nextResourceType);
List parameters = toR5SearchParams(mySearchParamRegistry
- .getActiveSearchParams(nextResourceType)
+ .getActiveSearchParams(
+ nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateResource(writer, sd, parameters, theOperations);
}
@@ -198,7 +199,8 @@ public class GraphQLProviderWithIntrospection extends GraphQLProvider {
}
if (theOperations.contains(GraphQLSchemaGenerator.FHIROperationType.SEARCH)) {
List parameters = toR5SearchParams(mySearchParamRegistry
- .getActiveSearchParams(nextResourceType)
+ .getActiveSearchParams(
+ nextResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
.values());
myGenerator.generateListAccessQuery(writer, parameters, nextResourceType);
myGenerator.generateConnectionAccessQuery(writer, parameters, nextResourceType);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index 6f0bb2b3c3d..c43ed2d4827 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -125,6 +125,33 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
init700();
init720();
init740();
+ init780();
+ }
+
+ /**
+ * Schema migrations for version 7.8.0: adds author columns (PKG_AUTHOR,
+ * AUTHOR_UPPER) to NPM_PACKAGE_VER, and redefines the description columns on
+ * NPM_PACKAGE_VER (PKG_DESC, DESC_UPPER) and NPM_PACKAGE (PACKAGE_DESC) as
+ * nullable STRING(512).
+ */
+ protected void init780() {
+ final Builder version = forVersion(VersionEnum.V7_8_0);
+
+ version.onTable("NPM_PACKAGE_VER")
+ .addColumn("20241023.10", "PKG_AUTHOR")
+ .nullable()
+ .type(ColumnTypeEnum.STRING, 512);
+ version.onTable("NPM_PACKAGE_VER")
+ .addColumn("20241023.20", "AUTHOR_UPPER")
+ .nullable()
+ .type(ColumnTypeEnum.STRING, 512);
+ version.onTable("NPM_PACKAGE_VER")
+ .modifyColumn("20241023.30", "PKG_DESC")
+ .nullable()
+ .withType(ColumnTypeEnum.STRING, 512);
+ version.onTable("NPM_PACKAGE_VER")
+ .modifyColumn("20241023.40", "DESC_UPPER")
+ .nullable()
+ .withType(ColumnTypeEnum.STRING, 512);
+
+ version.onTable("NPM_PACKAGE")
+ .modifyColumn("20241023.50", "PACKAGE_DESC")
+ .nullable()
+ .withType(ColumnTypeEnum.STRING, 512);
+ }
protected void init740() {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
index 1d306f609ce..18f36ca0fb7 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/JpaPackageCache.java
@@ -301,15 +301,10 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
boolean currentVersion =
updateCurrentVersionFlagForAllPackagesBasedOnNewIncomingVersion(packageId, packageVersionId);
- String packageDesc = null;
- if (npmPackage.description() != null) {
- if (npmPackage.description().length() > NpmPackageVersionEntity.PACKAGE_DESC_LENGTH) {
- packageDesc = npmPackage.description().substring(0, NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4)
- + "...";
- } else {
- packageDesc = npmPackage.description();
- }
- }
+
+ String packageDesc = truncateStorageString(npmPackage.description());
+ String packageAuthor = truncateStorageString(npmPackage.getNpm().asString("author"));
+
if (currentVersion) {
getProcessingMessages(npmPackage)
.add("Marking package " + packageId + "#" + initialPackageVersionId + " as current version");
@@ -327,6 +322,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
packageVersion.setPackage(pkg);
packageVersion.setPackageBinary(persistedPackage);
packageVersion.setSavedTime(new Date());
+ packageVersion.setAuthor(packageAuthor);
packageVersion.setDescription(packageDesc);
packageVersion.setFhirVersionId(npmPackage.fhirVersion());
packageVersion.setFhirVersion(fhirVersion);
@@ -625,6 +621,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
NpmPackageMetadataJson.Version version = new NpmPackageMetadataJson.Version();
version.setFhirVersion(next.getFhirVersionId());
+ version.setAuthor(next.getAuthor());
version.setDescription(next.getDescription());
version.setName(next.getPackageId());
version.setVersion(next.getVersionId());
@@ -682,7 +679,8 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
retVal.addObject()
.getPackage()
.setName(next.getPackageId())
- .setDescription(next.getPackage().getDescription())
+ .setAuthor(next.getAuthor())
+ .setDescription(next.getDescription())
.setVersion(next.getVersionId())
.addFhirVersion(next.getFhirVersionId())
.setBytes(next.getPackageSizeBytes());
@@ -791,10 +789,21 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
predicates.add(theCb.equal(resources.get("myCanonicalUrl"), thePackageSearchSpec.getResourceUrl()));
}
+ if (isNotBlank(thePackageSearchSpec.getVersion())) {
+ String searchTerm = thePackageSearchSpec.getVersion() + "%";
+ predicates.add(theCb.like(theRoot.get("myVersionId"), searchTerm));
+ }
+
if (isNotBlank(thePackageSearchSpec.getDescription())) {
String searchTerm = "%" + thePackageSearchSpec.getDescription() + "%";
searchTerm = StringUtil.normalizeStringForSearchIndexing(searchTerm);
- predicates.add(theCb.like(theRoot.get("myDescriptionUpper"), searchTerm));
+ predicates.add(theCb.like(theCb.upper(theRoot.get("myDescriptionUpper")), searchTerm));
+ }
+
+ if (isNotBlank(thePackageSearchSpec.getAuthor())) {
+ String searchTerm = "%" + thePackageSearchSpec.getAuthor() + "%";
+ searchTerm = StringUtil.normalizeStringForSearchIndexing(searchTerm);
+ predicates.add(theCb.like(theRoot.get("myAuthorUpper"), searchTerm));
}
if (isNotBlank(thePackageSearchSpec.getFhirVersion())) {
@@ -816,4 +825,21 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
return (List)
thePackage.getUserData().computeIfAbsent("JpPackageCache_ProcessingMessages", t -> new ArrayList<>());
}
+
+ /**
+ * Truncates a string to at most {@link NpmPackageVersionEntity#PACKAGE_DESC_LENGTH}
+ * characters, the maximum length used on several columns in
+ * {@link NpmPackageVersionEntity}. If the input exceeds the maximum, it is cut to
+ * PACKAGE_DESC_LENGTH - 4 characters and "..." is appended, so the result is one
+ * character below the column limit. Returns null for null input.
+ */
+ private static String truncateStorageString(String theInput) {
+ String retVal = null;
+ if (theInput != null) {
+ if (theInput.length() > NpmPackageVersionEntity.PACKAGE_DESC_LENGTH) {
+ // Leave room for the "..." suffix while staying within the column limit
+ retVal = theInput.substring(0, NpmPackageVersionEntity.PACKAGE_DESC_LENGTH - 4) + "...";
+ } else {
+ retVal = theInput;
+ }
+ }
+ return retVal;
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
index a7943770a02..f857c8c49da 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageMetadataJson.java
@@ -115,6 +115,9 @@ public class NpmPackageMetadataJson {
@JsonProperty("version")
private String myVersion;
+ @JsonProperty("author")
+ private String myAuthor;
+
@JsonProperty("description")
private String myDescription;
@@ -125,6 +128,14 @@ public class NpmPackageMetadataJson {
@JsonProperty("_bytes")
private long myBytes;
+ /** @return the package author serialized under the JSON {@code author} property */
+ public String getAuthor() {
+ return myAuthor;
+ }
+
+ /** Sets the package author serialized under the JSON {@code author} property. */
+ public void setAuthor(String theAuthor) {
+ myAuthor = theAuthor;
+ }
+
public String getName() {
return myName;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java
index 394d2f97dc8..ca08f01beaf 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/NpmPackageSearchResultJson.java
@@ -111,6 +111,9 @@ public class NpmPackageSearchResultJson {
@JsonProperty("version")
private String myVersion;
+ @JsonProperty("author")
+ private String myAuthor;
+
@JsonProperty("description")
private String myDescription;
@@ -171,5 +174,14 @@ public class NpmPackageSearchResultJson {
}
return this;
}
+
+ /** @return the package author serialized under the JSON {@code author} property */
+ public String getAuthor() {
+ return myAuthor;
+ }
+
+ /** Sets the package author; returns {@code this} for fluent chaining. */
+ public Package setAuthor(String theAuthor) {
+ myAuthor = theAuthor;
+ return this;
+ }
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
index 122caf5b490..b2a0f31bef4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageInstallerSvcImpl.java
@@ -598,7 +598,8 @@ public class PackageInstallerSvcImpl implements IPackageInstallerSvc {
return (statusTypes.get(0).getValueAsString().equals("requested"));
case "DocumentReference":
case "Communication":
- return (!statusTypes.get(0).getValueAsString().equals("?"));
+ return (statusTypes.get(0).isEmpty()
+ || !statusTypes.get(0).getValueAsString().equals("?"));
default:
return (statusTypes.get(0).getValueAsString().equals("active"));
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageSearchSpec.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageSearchSpec.java
index 2575c511ae7..099b7c68378 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageSearchSpec.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/packages/PackageSearchSpec.java
@@ -27,6 +27,8 @@ public class PackageSearchSpec {
private String myResourceUrl;
private CharSequence myDescription;
private String myFhirVersion;
+ private String myVersion;
+ private String myAuthor;
public String getFhirVersion() {
return myFhirVersion;
@@ -69,4 +71,20 @@ public class PackageSearchSpec {
public void setDescription(CharSequence theDescription) {
myDescription = theDescription;
}
+
+ /** @return the package version search term (used as a prefix match), or null */
+ public String getVersion() {
+ return myVersion;
+ }
+
+ /** Sets the package version search term. */
+ public void setVersion(String theVersion) {
+ myVersion = theVersion;
+ }
+
+ /** Sets the author search term (matched case-insensitively as a substring). */
+ public void setAuthor(String theAuthor) {
+ myAuthor = theAuthor;
+ }
+
+ /** @return the author search term, or null */
+ public String getAuthor() {
+ return myAuthor;
+ }
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java
index 84af0eb972c..adcf2923864 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/partition/PartitionLookupSvcImpl.java
@@ -203,6 +203,12 @@ public class PartitionLookupSvcImpl implements IPartitionLookupSvc {
myPartitionDao.delete(partition.get());
+ if (myInterceptorService.hasHooks(Pointcut.STORAGE_PARTITION_DELETED)) {
+ HookParams params = new HookParams()
+ .add(RequestPartitionId.class, partition.get().toRequestPartitionId());
+ myInterceptorService.callHooks(Pointcut.STORAGE_PARTITION_DELETED, params);
+ }
+
invalidateCaches();
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
index 0ab99892e74..ff94791e033 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaConformanceProviderDstu3.java
@@ -190,11 +190,12 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
* global params like _lastUpdated
*/
ResourceSearchParams searchParams;
- ResourceSearchParams serverConfigurationActiveSearchParams =
- myServerConfiguration.getActiveSearchParams(theResourceName);
+ ResourceSearchParams serverConfigurationActiveSearchParams = myServerConfiguration.getActiveSearchParams(
+ theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (mySearchParamRegistry != null) {
- searchParams =
- mySearchParamRegistry.getActiveSearchParams(theResourceName).makeCopy();
+ searchParams = mySearchParamRegistry
+ .getActiveSearchParams(theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
+ .makeCopy();
if (searchParams == null) {
return ResourceSearchParams.empty(theResourceName);
}
@@ -229,8 +230,8 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
if (isBlank(otherResourceType)) {
continue;
}
- ResourceSearchParams activeSearchParams =
- mySearchParamRegistry.getActiveSearchParams(otherResourceType);
+ ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
+ otherResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
activeSearchParams.values().stream()
.filter(t -> isNotBlank(t.getName()))
.filter(t -> t.getTargets().contains(resourcename))
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index f73b69a3c2d..477999b3d3e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -489,8 +489,13 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
if (!Constants.INCLUDE_STAR.equals(paramName)
- && mySearchParamRegistry.getActiveSearchParam(paramType, paramName) == null) {
- List validNames = mySearchParamRegistry.getActiveSearchParams(paramType).values().stream()
+ && mySearchParamRegistry.getActiveSearchParam(
+ paramType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
+ == null) {
+ List validNames = mySearchParamRegistry
+ .getActiveSearchParams(paramType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
+ .values()
+ .stream()
.filter(t -> t.getParamType() == RestSearchParameterTypeEnum.REFERENCE)
.map(t -> UrlUtil.sanitizeUrlPart(t.getName()))
.sorted()
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
index 478c60e2040..317b8fa2105 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
@@ -91,6 +91,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
private int mySyncSize = 250;
@Override
+ @SuppressWarnings({"rawtypes", "unchecked"})
public IBundleProvider executeQuery(
SearchParameterMap theParams,
RequestDetails theRequestDetails,
@@ -113,7 +114,6 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
.withRequestPartitionId(theRequestPartitionId)
.readOnly()
.execute(() -> {
-
// Load the results synchronously
List pids = new ArrayList<>();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
index ea44a1fdfef..5eadee42cb1 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/QueryStack.java
@@ -55,6 +55,7 @@ import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.UriPredicateBuilder;
+import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject;
import ca.uhn.fhir.jpa.search.builder.sql.PredicateBuilderFactory;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@@ -123,6 +124,7 @@ import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
+import static ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder.getResourceIdColumn;
import static ca.uhn.fhir.jpa.util.QueryParameterUtils.fromOperation;
import static ca.uhn.fhir.jpa.util.QueryParameterUtils.getChainedPart;
import static ca.uhn.fhir.jpa.util.QueryParameterUtils.getParamNameWithPrefix;
@@ -153,6 +155,7 @@ public class QueryStack {
private Map myParamNameToPredicateBuilderMap;
// used for _offset queries with sort, should be removed once the fix is applied to the async path too.
private boolean myUseAggregate;
+ private boolean myGroupingAdded;
/**
* Constructor
@@ -245,7 +248,7 @@ public class QueryStack {
resourceTablePredicateBuilder = (ResourceTablePredicateBuilder) firstPredicateBuilder;
} else {
resourceTablePredicateBuilder =
- mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getResourceIdColumn());
+ mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getJoinColumns());
}
mySqlBuilder.addSortDate(resourceTablePredicateBuilder.getColumnLastUpdated(), theAscending, myUseAggregate);
}
@@ -282,7 +285,7 @@ public class QueryStack {
resourceTablePredicateBuilder = (ResourceTablePredicateBuilder) firstPredicateBuilder;
} else {
resourceTablePredicateBuilder =
- mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getResourceIdColumn());
+ mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getJoinColumns());
}
mySqlBuilder.addSortString(resourceTablePredicateBuilder.getColumnFhirId(), theAscending, myUseAggregate);
}
@@ -315,7 +318,8 @@ public class QueryStack {
}
String targetType = null;
- RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
+ RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
+ theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (theReferenceTargetType != null) {
targetType = theReferenceTargetType;
} else if (param.getTargets().size() > 1) {
@@ -331,17 +335,20 @@ public class QueryStack {
+ "' as this parameter as this parameter does not define a target type. Please specify the target type.");
}
- RuntimeSearchParam targetSearchParameter = mySearchParamRegistry.getActiveSearchParam(targetType, theChain);
+ RuntimeSearchParam targetSearchParameter = mySearchParamRegistry.getActiveSearchParam(
+ targetType, theChain, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (targetSearchParameter == null) {
- Collection validSearchParameterNames =
- mySearchParamRegistry.getActiveSearchParams(targetType).values().stream()
- .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.STRING
- || t.getParamType() == RestSearchParameterTypeEnum.TOKEN
- || t.getParamType() == RestSearchParameterTypeEnum.DATE)
- .map(RuntimeSearchParam::getName)
- .sorted()
- .distinct()
- .collect(Collectors.toList());
+ Collection validSearchParameterNames = mySearchParamRegistry
+ .getActiveSearchParams(targetType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
+ .values()
+ .stream()
+ .filter(t -> t.getParamType() == RestSearchParameterTypeEnum.STRING
+ || t.getParamType() == RestSearchParameterTypeEnum.TOKEN
+ || t.getParamType() == RestSearchParameterTypeEnum.DATE)
+ .map(RuntimeSearchParam::getName)
+ .sorted()
+ .distinct()
+ .collect(Collectors.toList());
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(
@@ -358,7 +365,7 @@ public class QueryStack {
case STRING:
StringPredicateBuilder stringPredicateBuilder = mySqlBuilder.createStringPredicateBuilder();
addSortCustomJoin(
- resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+ resourceLinkPredicateBuilder.getJoinColumnsForTarget(),
stringPredicateBuilder,
stringPredicateBuilder.createHashIdentityPredicate(targetType, theChain));
@@ -369,7 +376,7 @@ public class QueryStack {
case TOKEN:
TokenPredicateBuilder tokenPredicateBuilder = mySqlBuilder.createTokenPredicateBuilder();
addSortCustomJoin(
- resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+ resourceLinkPredicateBuilder.getJoinColumnsForTarget(),
tokenPredicateBuilder,
tokenPredicateBuilder.createHashIdentityPredicate(targetType, theChain));
@@ -380,7 +387,7 @@ public class QueryStack {
case DATE:
DatePredicateBuilder datePredicateBuilder = mySqlBuilder.createDatePredicateBuilder();
addSortCustomJoin(
- resourceLinkPredicateBuilder.getColumnTargetResourceId(),
+ resourceLinkPredicateBuilder.getJoinColumnsForTarget(),
datePredicateBuilder,
datePredicateBuilder.createHashIdentityPredicate(targetType, theChain));
@@ -405,7 +412,7 @@ public class QueryStack {
double latitudeValue = location.getLatitudeValue();
double longitudeValue = location.getLongitudeValue();
final CoordsPredicateBuilder coordsPredicateBuilder = mySqlBuilder.addCoordsPredicateBuilder(
- resourceLinkPredicateBuilder.getColumnTargetResourceId());
+ resourceLinkPredicateBuilder.getJoinColumnsForTarget());
mySqlBuilder.addSortCoordsNear(
coordsPredicateBuilder, latitudeValue, longitudeValue, theAscending);
} else {
@@ -418,6 +425,7 @@ public class QueryStack {
return;
}
}
+ //noinspection fallthrough
case NUMBER:
case REFERENCE:
case COMPOSITE:
@@ -473,16 +481,16 @@ public class QueryStack {
BaseJoiningPredicateBuilder theFromJoiningPredicateBuilder,
BaseJoiningPredicateBuilder theToJoiningPredicateBuilder,
Condition theCondition) {
- addSortCustomJoin(
- theFromJoiningPredicateBuilder.getResourceIdColumn(), theToJoiningPredicateBuilder, theCondition);
+ addSortCustomJoin(theFromJoiningPredicateBuilder.getJoinColumns(), theToJoiningPredicateBuilder, theCondition);
}
private void addSortCustomJoin(
- DbColumn theFromDbColumn,
+ DbColumn theFromDbColumn[],
BaseJoiningPredicateBuilder theToJoiningPredicateBuilder,
Condition theCondition) {
+
ComboCondition onCondition =
- mySqlBuilder.createOnCondition(theFromDbColumn, theToJoiningPredicateBuilder.getResourceIdColumn());
+ mySqlBuilder.createOnCondition(theFromDbColumn, theToJoiningPredicateBuilder.getJoinColumns());
if (theCondition != null) {
onCondition.addCondition(theCondition);
@@ -490,7 +498,7 @@ public class QueryStack {
mySqlBuilder.addCustomJoin(
SelectQuery.JoinType.LEFT_OUTER,
- theFromDbColumn.getTable(),
+ theFromDbColumn[0].getTable(),
theToJoiningPredicateBuilder.getTable(),
onCondition);
}
@@ -502,7 +510,7 @@ public class QueryStack {
@SuppressWarnings("unchecked")
private PredicateBuilderCacheLookupResult createOrReusePredicateBuilder(
PredicateBuilderTypeEnum theType,
- DbColumn theSourceJoinColumn,
+ DbColumn[] theSourceJoinColumn,
String theParamName,
Supplier theFactoryMethod) {
boolean cacheHit = false;
@@ -534,7 +542,7 @@ public class QueryStack {
}
private Condition createPredicateComposite(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theParamDef,
@@ -551,7 +559,7 @@ public class QueryStack {
}
private Condition createPredicateComposite(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theParamDef,
@@ -605,7 +613,7 @@ public class QueryStack {
}
private Condition createPredicateCompositePart(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theParam,
@@ -813,7 +821,7 @@ public class QueryStack {
}
public Condition createPredicateCoords(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -858,7 +866,7 @@ public class QueryStack {
}
public Condition createPredicateDate(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -877,7 +885,7 @@ public class QueryStack {
}
public Condition createPredicateDate(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1003,10 +1011,11 @@ public class QueryStack {
return createPredicateSource(null, Collections.singletonList(param));
}
default:
- RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(theResourceName, paramName);
+ RuntimeSearchParam searchParam = mySearchParamRegistry.getActiveSearchParam(
+ theResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (searchParam == null) {
- Collection validNames =
- mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
+ Collection validNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
+ theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
String msg = myFhirContext
.getLocalizer()
.getMessageSanitized(
@@ -1105,7 +1114,7 @@ public class QueryStack {
}
private Condition createPredicateHas(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceType,
List> theHasParameters,
RequestDetails theRequest,
@@ -1164,13 +1173,14 @@ public class QueryStack {
// Ensure that the name of the search param
// (e.g. the `code` in Patient?_has:Observation:subject:code=sys|val)
// exists on the target resource type.
- RuntimeSearchParam owningParameterDef =
- mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramName);
+ RuntimeSearchParam owningParameterDef = mySearchParamRegistry.getRuntimeSearchParam(
+ targetResourceType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
// Ensure that the name of the back-referenced search param on the target (e.g. the `subject` in
// Patient?_has:Observation:subject:code=sys|val)
// exists on the target resource, or in the top-level Resource resource.
- mySearchParamRegistry.getRuntimeSearchParam(targetResourceType, paramReference);
+ mySearchParamRegistry.getRuntimeSearchParam(
+ targetResourceType, paramReference, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
IQueryParameterAnd> parsedParam = JpaParamUtil.parseQueryParams(
mySearchParamRegistry, myFhirContext, owningParameterDef, paramName, parameters);
@@ -1225,7 +1235,7 @@ public class QueryStack {
resourceLinkTableJoin.getColumnSourcePath(), mySqlBuilder.generatePlaceholders(paths));
Condition linkedPredicate =
- searchForIdsWithAndOr(with().setSourceJoinColumn(resourceLinkTableJoin.getColumnSrcResourceId())
+ searchForIdsWithAndOr(with().setSourceJoinColumn(resourceLinkTableJoin.getJoinColumnsForSource())
.setResourceName(targetResourceType)
.setParamName(parameterName)
.setAndOrParams(Collections.singletonList(orValues))
@@ -1239,7 +1249,7 @@ public class QueryStack {
}
public Condition createPredicateNumber(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1258,7 +1268,7 @@ public class QueryStack {
}
public Condition createPredicateNumber(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1318,7 +1328,7 @@ public class QueryStack {
}
public Condition createPredicateQuantity(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1337,7 +1347,7 @@ public class QueryStack {
}
public Condition createPredicateQuantity(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1360,7 +1370,7 @@ public class QueryStack {
theRequestPartitionId));
} else {
List quantityParams =
- theList.stream().map(t -> QuantityParam.toQuantityParam(t)).collect(Collectors.toList());
+ theList.stream().map(QuantityParam::toQuantityParam).collect(Collectors.toList());
BaseQuantityPredicateBuilder join = null;
boolean normalizedSearchEnabled = myStorageSettings
@@ -1368,8 +1378,8 @@ public class QueryStack {
.equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED);
if (normalizedSearchEnabled) {
List normalizedQuantityParams = quantityParams.stream()
- .map(t -> UcumServiceUtil.toCanonicalQuantityOrNull(t))
- .filter(t -> t != null)
+ .map(UcumServiceUtil::toCanonicalQuantityOrNull)
+ .filter(Objects::nonNull)
.collect(Collectors.toList());
if (normalizedQuantityParams.size() == quantityParams.size()) {
@@ -1405,7 +1415,7 @@ public class QueryStack {
}
public Condition createPredicateReference(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theParamName,
List theQualifiers,
@@ -1426,7 +1436,7 @@ public class QueryStack {
}
public Condition createPredicateReference(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theParamName,
List theQualifiers,
@@ -1473,17 +1483,33 @@ public class QueryStack {
}
public void addGrouping() {
- BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
- mySqlBuilder.getSelect().addGroupings(firstPredicateBuilder.getResourceIdColumn());
+ if (!myGroupingAdded) {
+ BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
+
+ /*
+ * Postgres and Oracle don't like it if we are doing a SELECT DISTINCT
+ * with multiple selected columns but no GROUP BY clause.
+ */
+ if (mySqlBuilder.isSelectPartitionId()) {
+ mySqlBuilder
+ .getSelect()
+ .addGroupings(
+ firstPredicateBuilder.getPartitionIdColumn(),
+ firstPredicateBuilder.getResourceIdColumn());
+ } else {
+ mySqlBuilder.getSelect().addGroupings(firstPredicateBuilder.getJoinColumns());
+ }
+ myGroupingAdded = true;
+ }
}
public void addOrdering() {
BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
- mySqlBuilder.getSelect().addOrderings(firstPredicateBuilder.getResourceIdColumn());
+ mySqlBuilder.getSelect().addOrderings(firstPredicateBuilder.getJoinColumns());
}
public Condition createPredicateReferenceForEmbeddedChainedSearchResource(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
RuntimeSearchParam theSearchParam,
List extends IQueryParameterType> theList,
@@ -1527,12 +1553,12 @@ public class QueryStack {
for (LeafNodeDefinition leafNodeDefinition : referenceLinks.get(nextReferenceLink)) {
SearchQueryBuilder builder;
if (wantChainedAndNormal) {
- builder = mySqlBuilder.newChildSqlBuilder();
+ builder = mySqlBuilder.newChildSqlBuilder(mySqlBuilder.isIncludePartitionIdInJoins());
} else {
builder = mySqlBuilder;
}
- DbColumn previousJoinColumn = null;
+ DbColumn[] previousJoinColumn = null;
// Create a reference link predicates to the subselect for every link but the last one
for (String nextLink : nextReferenceLink) {
@@ -1543,7 +1569,7 @@ public class QueryStack {
builder.addReferencePredicateBuilder(this, previousJoinColumn);
builder.addPredicate(
resourceLinkPredicateBuilder.createPredicateSourcePaths(Lists.newArrayList(nextLink)));
- previousJoinColumn = resourceLinkPredicateBuilder.getColumnTargetResourceId();
+ previousJoinColumn = resourceLinkPredicateBuilder.getJoinColumnsForTarget();
}
Condition containedCondition = createIndexPredicate(
@@ -1572,8 +1598,15 @@ public class QueryStack {
if (wantChainedAndNormal) {
if (theSourceJoinColumn == null) {
- retVal = new InCondition(
- mySqlBuilder.getOrCreateFirstPredicateBuilder(false).getResourceIdColumn(), union);
+ BaseJoiningPredicateBuilder root = mySqlBuilder.getOrCreateFirstPredicateBuilder(false);
+ DbColumn[] joinColumns = root.getJoinColumns();
+ Object joinColumnObject;
+ if (joinColumns.length == 1) {
+ joinColumnObject = joinColumns[0];
+ } else {
+ joinColumnObject = ColumnTupleObject.from(joinColumns);
+ }
+ retVal = new InCondition(joinColumnObject, union);
} else {
// -- for the resource link, need join with target_resource_id
retVal = new InCondition(theSourceJoinColumn, union);
@@ -1769,7 +1802,7 @@ public class QueryStack {
}
private Condition createIndexPredicate(
- DbColumn theSourceJoinColumn,
+ DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
String theParamName,
@@ -1883,7 +1916,7 @@ public class QueryStack {
@Nullable
public Condition createPredicateResourceId(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
List> theValues,
String theResourceName,
SearchFilterParser.CompareOperation theOperation,
@@ -1894,7 +1927,7 @@ public class QueryStack {
}
private Condition createPredicateSourceForAndList(
- @Nullable DbColumn theSourceJoinColumn, List> theAndOrParams) {
+ @Nullable DbColumn[] theSourceJoinColumn, List> theAndOrParams) {
mySqlBuilder.getOrCreateFirstPredicateBuilder();
List andPredicates = new ArrayList<>(theAndOrParams.size());
@@ -1905,7 +1938,7 @@ public class QueryStack {
}
private Condition createPredicateSource(
- @Nullable DbColumn theSourceJoinColumn, List extends IQueryParameterType> theList) {
+ @Nullable DbColumn[] theSourceJoinColumn, List extends IQueryParameterType> theList) {
if (myStorageSettings.getStoreMetaSourceInformation()
== JpaStorageSettings.StoreMetaSourceInformationEnum.NONE) {
String msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "sourceParamDisabled");
@@ -1948,7 +1981,7 @@ public class QueryStack {
}
private SourcePredicateBuilder getSourcePredicateBuilder(
- @Nullable DbColumn theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
+ @Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
return createOrReusePredicateBuilder(
PredicateBuilderTypeEnum.SOURCE,
theSourceJoinColumn,
@@ -1958,7 +1991,7 @@ public class QueryStack {
}
public Condition createPredicateString(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -1977,7 +2010,7 @@ public class QueryStack {
}
public Condition createPredicateString(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -2017,7 +2050,7 @@ public class QueryStack {
}
public Condition createPredicateTag(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
List> theList,
String theParamName,
RequestPartitionId theRequestPartitionId) {
@@ -2048,7 +2081,8 @@ public class QueryStack {
BaseJoiningPredicateBuilder join;
if (paramInverted) {
- SearchQueryBuilder sqlBuilder = mySqlBuilder.newChildSqlBuilder();
+ boolean selectPartitionId = myPartitionSettings.isPartitionIdsInPrimaryKeys();
+ SearchQueryBuilder sqlBuilder = mySqlBuilder.newChildSqlBuilder(selectPartitionId);
TagPredicateBuilder tagSelector = sqlBuilder.addTagPredicateBuilder(null);
sqlBuilder.addPredicate(
tagSelector.createPredicateTag(tagType, tokens, theParamName, theRequestPartitionId));
@@ -2056,7 +2090,14 @@ public class QueryStack {
join = mySqlBuilder.getOrCreateFirstPredicateBuilder();
Expression subSelect = new Subquery(sql);
- tagPredicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);
+
+ Object left;
+ if (selectPartitionId) {
+ left = new ColumnTupleObject(join.getJoinColumns());
+ } else {
+ left = join.getResourceIdColumn();
+ }
+ tagPredicate = new InCondition(left, subSelect).setNegate(true);
} else {
// Tag table can't be a query root because it will include deleted resources, and can't select by
@@ -2129,7 +2170,7 @@ public class QueryStack {
}
public Condition createPredicateToken(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -2148,7 +2189,7 @@ public class QueryStack {
}
public Condition createPredicateToken(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -2219,7 +2260,8 @@ public class QueryStack {
BaseJoiningPredicateBuilder join;
if (paramInverted) {
- SearchQueryBuilder sqlBuilder = theSqlBuilder.newChildSqlBuilder();
+ boolean selectPartitionId = myPartitionSettings.isPartitionIdsInPrimaryKeys();
+ SearchQueryBuilder sqlBuilder = theSqlBuilder.newChildSqlBuilder(selectPartitionId);
TokenPredicateBuilder tokenSelector = sqlBuilder.addTokenPredicateBuilder(null);
sqlBuilder.addPredicate(tokenSelector.createPredicateToken(
tokens, theResourceName, theSpnamePrefix, theSearchParam, theRequestPartitionId));
@@ -2228,13 +2270,16 @@ public class QueryStack {
join = theSqlBuilder.getOrCreateFirstPredicateBuilder();
+ DbColumn[] leftColumns;
if (theSourceJoinColumn == null) {
- predicate = new InCondition(join.getResourceIdColumn(), subSelect).setNegate(true);
+ leftColumns = join.getJoinColumns();
} else {
- // -- for the resource link, need join with target_resource_id
- predicate = new InCondition(theSourceJoinColumn, subSelect).setNegate(true);
+ leftColumns = theSourceJoinColumn;
}
+ Object left = new ColumnTupleObject(leftColumns);
+ predicate = new InCondition(left, subSelect).setNegate(true);
+
} else {
Boolean isMissing = theList.get(0).getMissing();
if (isMissing != null) {
@@ -2264,7 +2309,7 @@ public class QueryStack {
}
public Condition createPredicateUri(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -2285,7 +2330,7 @@ public class QueryStack {
}
public Condition createPredicateUri(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theSpnamePrefix,
RuntimeSearchParam theSearchParam,
@@ -2400,9 +2445,9 @@ public class QueryStack {
* Raw match on RES_ID
*/
private Condition createPredicateResourcePID(
- DbColumn theSourceJoinColumn, List> theAndOrParams) {
+ DbColumn[] theSourceJoinColumn, List> theAndOrParams) {
- DbColumn pidColumn = theSourceJoinColumn;
+ DbColumn pidColumn = getResourceIdColumn(theSourceJoinColumn);
if (pidColumn == null) {
BaseJoiningPredicateBuilder predicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
@@ -2427,7 +2472,7 @@ public class QueryStack {
}
private Condition createReverseSearchPredicateLastUpdated(
- List> theAndOrParams, DbColumn theSourceColumn) {
+ List> theAndOrParams, DbColumn[] theSourceColumn) {
ResourceTablePredicateBuilder resourceTableJoin =
mySqlBuilder.addResourceTablePredicateBuilder(theSourceColumn);
@@ -2448,14 +2493,15 @@ public class QueryStack {
@Nullable
private Condition createPredicateSearchParameter(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theParamName,
List> theAndOrParams,
RequestDetails theRequest,
RequestPartitionId theRequestPartitionId) {
List andPredicates = new ArrayList<>();
- RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(theResourceName, theParamName);
+ RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
+ theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (nextParamDef != null) {
if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.isIncludePartitionInSearchHashes()) {
@@ -2662,15 +2708,33 @@ public class QueryStack {
}
} else {
- String msg = myFhirContext
- .getLocalizer()
- .getMessageSanitized(
- BaseStorageDao.class,
- "invalidSearchParameter",
- theParamName,
- theResourceName,
- mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName));
- throw new InvalidRequestException(Msg.code(1223) + msg);
+ RuntimeSearchParam notEnabledForSearchParam = mySearchParamRegistry.getActiveSearchParam(
+ theResourceName, theParamName, ISearchParamRegistry.SearchParamLookupContextEnum.ALL);
+ if (notEnabledForSearchParam == null) {
+ String msg = myFhirContext
+ .getLocalizer()
+ .getMessageSanitized(
+ BaseStorageDao.class,
+ "invalidSearchParameter",
+ theParamName,
+ theResourceName,
+ mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
+ theResourceName,
+ ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH));
+ throw new InvalidRequestException(Msg.code(1223) + msg);
+ } else {
+ String msg = myFhirContext
+ .getLocalizer()
+ .getMessageSanitized(
+ BaseStorageDao.class,
+ "invalidSearchParameterNotEnabledForSearch",
+ theParamName,
+ theResourceName,
+ mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
+ theResourceName,
+ ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH));
+ throw new InvalidRequestException(Msg.code(2540) + msg);
+ }
}
}
}
@@ -2690,7 +2754,7 @@ public class QueryStack {
* by this method
*/
private boolean handleFullyChainedParameter(
- @Nullable DbColumn theSourceJoinColumn,
+ @Nullable DbColumn[] theSourceJoinColumn,
String theResourceName,
String theParamName,
RequestDetails theRequest,
@@ -2701,8 +2765,8 @@ public class QueryStack {
ReferenceParam param = (ReferenceParam) nextAnd.get(0);
if (isNotBlank(param.getChain())) {
String fullName = theParamName + "." + param.getChain();
- RuntimeSearchParam fullChainParam =
- mySearchParamRegistry.getActiveSearchParam(theResourceName, fullName);
+ RuntimeSearchParam fullChainParam = mySearchParamRegistry.getActiveSearchParam(
+ theResourceName, fullName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (fullChainParam != null) {
List swappedParamTypes = nextAnd.stream()
.map(t -> newParameterInstance(fullChainParam, null, t.getValueAsQueryToken(myFhirContext)))
@@ -2769,8 +2833,10 @@ public class QueryStack {
if (indexOnContainedResources) {
return true;
}
- RuntimeSearchParam param =
- mySearchParamRegistry.getActiveSearchParam(theResourceType, theParameterName);
+ RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
+ theResourceType,
+ theParameterName,
+ ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
return param != null && param.hasUpliftRefchain(t);
});
@@ -3010,7 +3076,8 @@ public class QueryStack {
for (String nextTarget : thePreviousSearchParam.getTargets()) {
RuntimeSearchParam nextSearchParam = null;
if (isBlank(theResourceType) || theResourceType.equals(nextTarget)) {
- nextSearchParam = mySearchParamRegistry.getActiveSearchParam(nextTarget, nextParamName);
+ nextSearchParam = mySearchParamRegistry.getActiveSearchParam(
+ nextTarget, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
}
if (nextSearchParam != null) {
searchParamFound = true;
@@ -3147,7 +3214,7 @@ public class QueryStack {
}
public static class SearchForIdsParams {
- DbColumn mySourceJoinColumn;
+ DbColumn[] mySourceJoinColumn;
String myResourceName;
String myParamName;
List> myAndOrParams;
@@ -3159,11 +3226,11 @@ public class QueryStack {
return new SearchForIdsParams();
}
- public DbColumn getSourceJoinColumn() {
+ public DbColumn[] getSourceJoinColumn() {
return mySourceJoinColumn;
}
- public SearchForIdsParams setSourceJoinColumn(DbColumn theSourceJoinColumn) {
+ public SearchForIdsParams setSourceJoinColumn(DbColumn[] theSourceJoinColumn) {
mySourceJoinColumn = theSourceJoinColumn;
return this;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
index 59ecfa233ae..a7e9ebc32f9 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
@@ -121,9 +121,11 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
-import org.springframework.jdbc.core.SingleColumnRowMapper;
+import org.springframework.jdbc.core.RowMapper;
import org.springframework.transaction.support.TransactionSynchronizationManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -131,6 +133,7 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -140,7 +143,7 @@ import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
-import static ca.uhn.fhir.jpa.util.InClauseNormalizer.*;
+import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultString;
@@ -161,7 +164,6 @@ public class SearchBuilder implements ISearchBuilder {
@Deprecated
public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;
- public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
public static final String RESOURCE_ID_ALIAS = "resource_id";
public static final String RESOURCE_VERSION_ALIAS = "resource_version";
private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
@@ -171,7 +173,7 @@ public class SearchBuilder implements ISearchBuilder {
private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
- public static boolean myUseMaxPageSize50ForTest = false;
+ public static Integer myMaxPageSizeForTests = null;
protected final IInterceptorBroadcaster myInterceptorBroadcaster;
protected final IResourceTagDao myResourceTagDao;
private String myResourceName;
@@ -464,6 +466,8 @@ public class SearchBuilder implements ISearchBuilder {
.chunk(
fulltextExecutor,
SearchBuilder.getMaximumPageSize(),
+ // for each list of (SearchBuilder.getMaximumPageSize())
+ // we create a chunked query and add it to 'queries'
t -> doCreateChunkedQueries(
theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
}
@@ -651,8 +655,8 @@ public class SearchBuilder implements ISearchBuilder {
|| theParams.getSort() != null
|| theParams.keySet().contains(Constants.PARAM_HAS)
|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
- List activeComboParams =
- mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());
+ List activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
+ myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
if (activeComboParams.isEmpty()) {
sqlBuilder.setNeedResourceTableRoot(true);
}
@@ -799,7 +803,16 @@ public class SearchBuilder implements ISearchBuilder {
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);
- List output = jdbcTemplate.query(sql, args, new SingleColumnRowMapper<>(Long.class));
+ List output = jdbcTemplate.query(sql, args, new RowMapper() {
+ @Override
+ public Long mapRow(ResultSet rs, int rowNum) throws SQLException {
+ if (myPartitionSettings.isPartitioningEnabled()) {
+ return rs.getLong(2);
+ } else {
+ return rs.getLong(1);
+ }
+ }
+ });
// we add a search executor to fetch unlinked patients first
theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
@@ -915,8 +928,8 @@ public class SearchBuilder implements ISearchBuilder {
theQueryStack.addSortOnLastUpdated(ascending);
} else {
- RuntimeSearchParam param =
- mySearchParamRegistry.getActiveSearchParam(myResourceName, theSort.getParamName());
+ RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
+ myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
/*
* If we have a sort like _sort=subject.name and we have an
@@ -940,8 +953,8 @@ public class SearchBuilder implements ISearchBuilder {
referenceParamTargetType = referenceParam.substring(0, colonIdx);
referenceParam = referenceParam.substring(colonIdx + 1);
}
- RuntimeSearchParam outerParam =
- mySearchParamRegistry.getActiveSearchParam(myResourceName, referenceParam);
+ RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
+ myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
if (outerParam == null) {
throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
} else if (outerParam.hasUpliftRefchain(targetParam)) {
@@ -949,8 +962,10 @@ public class SearchBuilder implements ISearchBuilder {
if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
continue;
}
- RuntimeSearchParam innerParam =
- mySearchParamRegistry.getActiveSearchParam(nextTargetType, targetParam);
+ RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
+ nextTargetType,
+ targetParam,
+ ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
if (innerParam != null) {
param = innerParam;
break;
@@ -984,7 +999,8 @@ public class SearchBuilder implements ISearchBuilder {
}
if (param == null) {
- param = mySearchParamRegistry.getActiveSearchParam(myResourceName, paramName);
+ param = mySearchParamRegistry.getActiveSearchParam(
+ myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
}
if (param == null) {
@@ -1063,8 +1079,8 @@ public class SearchBuilder implements ISearchBuilder {
}
private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
- Collection validSearchParameterNames =
- mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(theResourceName);
+ Collection validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
+ theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
String msg = myContext
.getLocalizer()
.getMessageSanitized(
@@ -1527,7 +1543,8 @@ public class SearchBuilder implements ISearchBuilder {
String paramName = nextInclude.getParamName();
if (isNotBlank(paramName)) {
- param = mySearchParamRegistry.getActiveSearchParam(resType, paramName);
+ param = mySearchParamRegistry.getActiveSearchParam(
+ resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
} else {
param = null;
}
@@ -1584,16 +1601,37 @@ public class SearchBuilder implements ISearchBuilder {
String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft();
+ Map limitParams = new HashMap<>();
+ if (maxCount != null) {
+ LinkedList