merge master in

juan.marchionatto 2024-11-28 16:33:47 -05:00
commit 6c4ab554fa
212 changed files with 4728 additions and 1919 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -713,6 +713,8 @@ public class FhirContext {
"org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport";
String commonCodeSystemsSupportType =
"org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService";
String snapshotGeneratingType =
"org.hl7.fhir.common.hapi.validation.support.SnapshotGeneratingValidationSupport";
if (ReflectionUtil.typeExists(inMemoryTermSvcType)) {
IValidationSupport inMemoryTermSvc = ReflectionUtil.newInstanceOrReturnNull(
inMemoryTermSvcType,
@ -724,11 +726,23 @@ public class FhirContext {
IValidationSupport.class,
new Class<?>[] {FhirContext.class},
new Object[] {this});
IValidationSupport snapshotGeneratingSupport = null;
if (getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
snapshotGeneratingSupport = ReflectionUtil.newInstanceOrReturnNull(
snapshotGeneratingType,
IValidationSupport.class,
new Class<?>[] {FhirContext.class},
new Object[] {this});
}
retVal = ReflectionUtil.newInstanceOrReturnNull(
"org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain",
IValidationSupport.class,
new Class<?>[] {IValidationSupport[].class},
new Object[] {new IValidationSupport[] {retVal, inMemoryTermSvc, commonCodeSystemsSupport}});
new Object[] {
new IValidationSupport[] {
retVal, inMemoryTermSvc, commonCodeSystemsSupport, snapshotGeneratingSupport
}
});
assert retVal != null
: "Failed to instantiate "
+ "org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain";
@ -1293,7 +1307,15 @@ public class FhirContext {
* @since 5.1.0
*/
public static FhirContext forCached(FhirVersionEnum theFhirVersionEnum) {
return ourStaticContexts.computeIfAbsent(theFhirVersionEnum, v -> new FhirContext(v));
return ourStaticContexts.computeIfAbsent(theFhirVersionEnum, FhirContext::forVersion);
}
/**
* An uncached version of {@link #forCached(FhirVersionEnum)}.
*
* @param theFhirVersionEnum the FHIR version to create a context for
* @return a new FhirContext for the given version
*/
public static FhirContext forVersion(FhirVersionEnum theFhirVersionEnum) {
return new FhirContext(theFhirVersionEnum);
}
private static Collection<Class<? extends IBaseResource>> toCollection(

View File

@ -135,15 +135,19 @@ public enum FhirVersionEnum {
/**
* Creates a new FhirContext for this FHIR version
* @deprecated since 7.7. Use {@link FhirContext#forVersion(FhirVersionEnum)} instead
*/
@Deprecated(forRemoval = true, since = "7.7")
public FhirContext newContext() {
return new FhirContext(this);
return FhirContext.forVersion(this);
}
/**
* Creates a new FhirContext for this FHIR version, or returns a previously created one if one exists. This
* method uses {@link FhirContext#forCached(FhirVersionEnum)} to return a cached instance.
* @deprecated since 7.7. Use {@link FhirContext#forCached(FhirVersionEnum)} instead
*/
@Deprecated(forRemoval = true, since = "7.7")
public FhirContext newContextCached() {
return FhirContext.forCached(this);
}
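
A short migration sketch (not part of the commit) showing the replacement calls for the two deprecated factory methods:

// Before (deprecated as of 7.7)
FhirContext uncached = FhirVersionEnum.R4.newContext();
FhirContext cached = FhirVersionEnum.R4.newContextCached();

// After
FhirContext uncached = FhirContext.forVersion(FhirVersionEnum.R4);
FhirContext cached = FhirContext.forCached(FhirVersionEnum.R4);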

View File

@ -22,11 +22,26 @@ package ca.uhn.fhir.context.support;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.Objects;
public class ConceptValidationOptions {
private boolean myValidateDisplay;
private boolean myInferSystem;
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof ConceptValidationOptions)) return false;
ConceptValidationOptions that = (ConceptValidationOptions) theO;
return myValidateDisplay == that.myValidateDisplay && myInferSystem == that.myInferSystem;
}
@Override
public int hashCode() {
return Objects.hash(myValidateDisplay, myInferSystem);
}
public boolean isInferSystem() {
return myInferSystem;
}

View File

@ -23,7 +23,9 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.util.ILockable;
import ca.uhn.fhir.util.ReflectionUtil;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -48,6 +50,13 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
private static final Map<FhirVersionEnum, IValidationSupport> ourImplementations =
Collections.synchronizedMap(new HashMap<>());
/**
* Userdata key indicating the ID of the package that a given conformance resource was sourced from
*/
public static final String SOURCE_PACKAGE_ID =
DefaultProfileValidationSupport.class.getName() + "_SOURCE_PACKAGE_ID";
private final FhirContext myCtx;
/**
* This module just delegates all calls to a concrete implementation which will
@ -62,7 +71,8 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
*
* @param theFhirContext The context to use
*/
public DefaultProfileValidationSupport(FhirContext theFhirContext) {
public DefaultProfileValidationSupport(@Nonnull FhirContext theFhirContext) {
Validate.notNull(theFhirContext, "FhirContext must not be null");
myCtx = theFhirContext;
IValidationSupport strategy;
@ -106,33 +116,45 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
@Override
public List<IBaseResource> fetchAllConformanceResources() {
return myDelegate.fetchAllConformanceResources();
List<IBaseResource> retVal = myDelegate.fetchAllConformanceResources();
addPackageInformation(retVal);
return retVal;
}
@Override
public <T extends IBaseResource> List<T> fetchAllStructureDefinitions() {
return myDelegate.fetchAllStructureDefinitions();
List<T> retVal = myDelegate.fetchAllStructureDefinitions();
addPackageInformation(retVal);
return retVal;
}
@Nullable
@Override
public <T extends IBaseResource> List<T> fetchAllNonBaseStructureDefinitions() {
return myDelegate.fetchAllNonBaseStructureDefinitions();
List<T> retVal = myDelegate.fetchAllNonBaseStructureDefinitions();
addPackageInformation(retVal);
return retVal;
}
@Override
public IBaseResource fetchCodeSystem(String theSystem) {
return myDelegate.fetchCodeSystem(theSystem);
IBaseResource retVal = myDelegate.fetchCodeSystem(theSystem);
addPackageInformation(retVal);
return retVal;
}
@Override
public IBaseResource fetchStructureDefinition(String theUrl) {
return myDelegate.fetchStructureDefinition(theUrl);
IBaseResource retVal = myDelegate.fetchStructureDefinition(theUrl);
addPackageInformation(retVal);
return retVal;
}
@Override
public IBaseResource fetchValueSet(String theUrl) {
return myDelegate.fetchValueSet(theUrl);
IBaseResource retVal = myDelegate.fetchValueSet(theUrl);
addPackageInformation(retVal);
return retVal;
}
public void flush() {
@ -158,4 +180,43 @@ public class DefaultProfileValidationSupport implements IValidationSupport {
}
return urlValueString;
}
private <T extends IBaseResource> void addPackageInformation(List<T> theResources) {
if (theResources != null) {
theResources.forEach(this::addPackageInformation);
}
}
private void addPackageInformation(IBaseResource theResource) {
if (theResource != null) {
String sourcePackageId = null;
switch (myCtx.getVersion().getVersion()) {
case DSTU2:
case DSTU2_HL7ORG:
sourcePackageId = "hl7.fhir.r2.core";
break;
case DSTU2_1:
return;
case DSTU3:
sourcePackageId = "hl7.fhir.r3.core";
break;
case R4:
sourcePackageId = "hl7.fhir.r4.core";
break;
case R4B:
sourcePackageId = "hl7.fhir.r4b.core";
break;
case R5:
sourcePackageId = "hl7.fhir.r5.core";
break;
}
Validate.notNull(
sourcePackageId,
"Don't know how to handle package ID: %s",
myCtx.getVersion().getVersion());
theResource.setUserData(SOURCE_PACKAGE_ID, sourcePackageId);
}
}
}
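
As an illustration (a sketch assuming an R4 context, not part of the commit), the new SOURCE_PACKAGE_ID user-data entry can be read back from any conformance resource returned by this module:

FhirContext ctx = FhirContext.forCached(FhirVersionEnum.R4);
DefaultProfileValidationSupport defaultSupport = new DefaultProfileValidationSupport(ctx);
IBaseResource patientSd = defaultSupport.fetchStructureDefinition("http://hl7.org/fhir/StructureDefinition/Patient");
// For an R4 context, addPackageInformation() above tags the resource with "hl7.fhir.r4.core"
String sourcePackage = (String) patientSd.getUserData(DefaultProfileValidationSupport.SOURCE_PACKAGE_ID);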

View File

@ -41,6 +41,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@ -116,7 +117,8 @@ public interface IValidationSupport {
@Nonnull String theValueSetUrlToExpand)
throws ResourceNotFoundException {
Validate.notBlank(theValueSetUrlToExpand, "theValueSetUrlToExpand must not be null or blank");
IBaseResource valueSet = fetchValueSet(theValueSetUrlToExpand);
IBaseResource valueSet =
theValidationSupportContext.getRootValidationSupport().fetchValueSet(theValueSetUrlToExpand);
if (valueSet == null) {
throw new ResourceNotFoundException(
Msg.code(2024) + "Unknown ValueSet: " + UrlUtil.escapeUrlParam(theValueSetUrlToExpand));
@ -212,8 +214,8 @@ public interface IValidationSupport {
() -> fetchStructureDefinition(theUri), () -> fetchValueSet(theUri), () -> fetchCodeSystem(theUri)
};
return (T) Arrays.stream(sources)
.map(t -> t.get())
.filter(t -> t != null)
.map(Supplier::get)
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
}
@ -792,6 +794,7 @@ public interface IValidationSupport {
return myValue;
}
@Override
public String getType() {
return TYPE_STRING;
}
@ -826,6 +829,7 @@ public interface IValidationSupport {
return myDisplay;
}
@Override
public String getType() {
return TYPE_CODING;
}
@ -1431,10 +1435,9 @@ public interface IValidationSupport {
}
/**
* <p>
* Warning: This method's behaviour and naming is preserved for backwards compatibility, BUT the actual naming and
* function are not aligned.
* </p>
* When validating a CodeableConcept containing multiple codings, this method can be used to control whether
* the validator requires all codings in the CodeableConcept to be valid in order to consider the
* CodeableConcept valid.
* <p>
* See VersionSpecificWorkerContextWrapper#validateCode in hapi-fhir-validation, and the refer to the values below
* for the behaviour associated with each value.
@ -1449,7 +1452,7 @@ public interface IValidationSupport {
* </p>
* @return true or false depending on the desired coding validation behaviour.
*/
default boolean isEnabledValidationForCodingsLogicalAnd() {
default boolean isCodeableConceptValidationSuccessfulIfNotAllCodingsAreValid() {
return false;
}
}
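
A minimal sketch (an assumed custom module, not part of the commit) of a validation support implementation opting into the lenient behaviour described above, where a CodeableConcept is accepted if at least one of its codings validates:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;

public class LenientCodingValidationSupport implements IValidationSupport {

	private final FhirContext myFhirContext;

	public LenientCodingValidationSupport(FhirContext theFhirContext) {
		myFhirContext = theFhirContext;
	}

	@Override
	public FhirContext getFhirContext() {
		return myFhirContext;
	}

	@Override
	public boolean isCodeableConceptValidationSuccessfulIfNotAllCodingsAreValid() {
		// Accept the CodeableConcept even when some codings fail to validate
		return true;
	}
}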

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.context.support;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;
/**
* Represents parameters which can be passed to the $lookup operation for codes.
@ -72,4 +73,20 @@ public class LookupCodeRequest {
}
return myPropertyNames;
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof LookupCodeRequest)) return false;
LookupCodeRequest that = (LookupCodeRequest) theO;
return Objects.equals(mySystem, that.mySystem)
&& Objects.equals(myCode, that.myCode)
&& Objects.equals(myDisplayLanguage, that.myDisplayLanguage)
&& Objects.equals(myPropertyNames, that.myPropertyNames);
}
@Override
public int hashCode() {
return Objects.hash(mySystem, myCode, myDisplayLanguage, myPropertyNames);
}
}

View File

@ -42,7 +42,7 @@ public class ValidationSupportContext {
return myCurrentlyGeneratingSnapshots;
}
public boolean isEnabledValidationForCodingsLogicalAnd() {
return myRootValidationSupport.isEnabledValidationForCodingsLogicalAnd();
public boolean isCodeableConceptValidationSuccessfulIfNotAllCodingsAreValid() {
return myRootValidationSupport.isCodeableConceptValidationSuccessfulIfNotAllCodingsAreValid();
}
}

View File

@ -21,6 +21,8 @@ package ca.uhn.fhir.context.support;
import org.apache.commons.lang3.Validate;
import java.util.Objects;
/**
* Options for ValueSet expansion
*
@ -126,4 +128,23 @@ public class ValueSetExpansionOptions {
myDisplayLanguage = theDisplayLanguage;
return this;
}
@Override
public boolean equals(Object theO) {
if (this == theO) return true;
if (!(theO instanceof ValueSetExpansionOptions)) return false;
ValueSetExpansionOptions that = (ValueSetExpansionOptions) theO;
return myFailOnMissingCodeSystem == that.myFailOnMissingCodeSystem
&& myCount == that.myCount
&& myOffset == that.myOffset
&& myIncludeHierarchy == that.myIncludeHierarchy
&& Objects.equals(myFilter, that.myFilter)
&& Objects.equals(myDisplayLanguage, that.myDisplayLanguage);
}
@Override
public int hashCode() {
return Objects.hash(
myFailOnMissingCodeSystem, myCount, myOffset, myIncludeHierarchy, myFilter, myDisplayLanguage);
}
}
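
Since these option classes now have value semantics (presumably so they can serve as cache keys, per the ValidationSupportChain caching changes later in this commit), equal settings compare equal. A small illustrative sketch using java.util collections, not part of the commit:

ValueSetExpansionOptions a = new ValueSetExpansionOptions();
ValueSetExpansionOptions b = new ValueSetExpansionOptions();
Map<ValueSetExpansionOptions, Object> expansionCache = new HashMap<>();
expansionCache.put(a, new Object());
// With equals()/hashCode() defined above, an equal options instance hits the same cache entry
boolean sameEntry = expansionCache.containsKey(b); // true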

View File

@ -46,7 +46,11 @@ public @interface Hook {
* and allowable values can be positive or negative or 0.
* <p>
* If no order is specified, or the order is set to <code>0</code> (the default order),
* the order specified at the interceptor type level will take precedence.
* the order specified at the {@link Interceptor#order() interceptor type level} will be used.
* </p>
* <p>
* Note that if two hook methods have the same order, then the order of execution is undefined. If
* order is important, then an order must always be explicitly stated.
* </p>
*/
int order() default Interceptor.DEFAULT_ORDER;
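
To illustrate the ordering rules described above, a hypothetical interceptor (not part of the commit, assuming the standard ca.uhn.fhir.interceptor.api imports):

@Interceptor(order = 100) // type-level order, used by hooks that leave order at the default
public class AuditInterceptor {

	// Runs with the type-level order of 100
	@Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
	public void logRequest(RequestDetails theRequestDetails) {
		// audit logging here
	}

	// An explicit hook-level order takes precedence over the type-level order;
	// ties between equal orders are undefined, so state the order when it matters
	@Hook(value = Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED, order = -10)
	public void checkCredentials(RequestDetails theRequestDetails) {
		// authentication check here
	}
}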

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.interceptor.api;
import java.util.List;
import java.util.function.Supplier;
public interface IBaseInterceptorBroadcaster<POINTCUT extends IPointcut> {
@ -73,4 +74,15 @@ public interface IBaseInterceptorBroadcaster<POINTCUT extends IPointcut> {
* @since 4.0.0
*/
boolean hasHooks(POINTCUT thePointcut);
List<IInvoker> getInvokersForPointcut(POINTCUT thePointcut);
interface IInvoker extends Comparable<IInvoker> {
Object invoke(HookParams theParams);
int getOrder();
Object getInterceptor();
}
}

View File

@ -27,6 +27,8 @@ public interface IPointcut {
@Nonnull
Class<?> getReturnType();
Class<?> getBooleanReturnTypeForEnum();
@Nonnull
List<String> getParameterTypes();

View File

@ -26,6 +26,7 @@ import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.validation.ValidationResult;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseConformance;
import java.io.Writer;
@ -3107,6 +3108,10 @@ public enum Pointcut implements IPointcut {
@Nonnull Class<?> theReturnType,
@Nonnull ExceptionHandlingSpec theExceptionHandlingSpec,
String... theParameterTypes) {
// This enum uses the lowercase-b boolean type to indicate boolean return pointcuts
Validate.isTrue(!theReturnType.equals(Boolean.class), "Return type Boolean not allowed here, must be boolean");
myReturnType = theReturnType;
myExceptionHandlingSpec = theExceptionHandlingSpec;
myParameterTypes = Collections.unmodifiableList(Arrays.asList(theParameterTypes));
@ -3132,6 +3137,11 @@ public enum Pointcut implements IPointcut {
return myReturnType;
}
@Override
public Class<?> getBooleanReturnTypeForEnum() {
return boolean.class;
}
@Override
@Nonnull
public List<String> getParameterTypes() {

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.interceptor.executor;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IBaseInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.IBaseInterceptorService;
@ -57,12 +58,13 @@ import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & IPointcut>
implements IBaseInterceptorService<POINTCUT>, IBaseInterceptorBroadcaster<POINTCUT> {
private static final Logger ourLog = LoggerFactory.getLogger(BaseInterceptorService.class);
@ -74,12 +76,11 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
AttributeKey.stringKey("hapifhir.interceptor.method_name");
private final List<Object> myInterceptors = new ArrayList<>();
private final ListMultimap<POINTCUT, BaseInvoker> myGlobalInvokers = ArrayListMultimap.create();
private final ListMultimap<POINTCUT, BaseInvoker> myAnonymousInvokers = ArrayListMultimap.create();
private final ListMultimap<POINTCUT, IInvoker> myGlobalInvokers = ArrayListMultimap.create();
private final ListMultimap<POINTCUT, IInvoker> myAnonymousInvokers = ArrayListMultimap.create();
private final Object myRegistryMutex = new Object();
private final Class<POINTCUT> myPointcutType;
private volatile EnumSet<POINTCUT> myRegisteredPointcuts;
private String myName;
private boolean myWarnOnInterceptorWithNoHooks = true;
/**
@ -93,10 +94,11 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
* Constructor
*
* @param theName The name for this registry (useful for troubleshooting)
* @deprecated The name parameter is not used for anything
*/
@Deprecated(since = "8.0.0", forRemoval = true)
public BaseInterceptorService(Class<POINTCUT> thePointcutType, String theName) {
super();
myName = theName;
myPointcutType = thePointcutType;
rebuildRegisteredPointcutSet();
}
@ -113,13 +115,17 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
return myInterceptors;
}
public void setName(String theName) {
myName = theName;
/**
* @deprecated This value is not used anywhere
*/
@Deprecated(since = "8.0.0", forRemoval = true)
public void setName(@SuppressWarnings("unused") String theName) {
// nothing
}
protected void registerAnonymousInterceptor(POINTCUT thePointcut, Object theInterceptor, BaseInvoker theInvoker) {
Validate.notNull(thePointcut);
Validate.notNull(theInterceptor);
Validate.notNull(thePointcut, "thePointcut must not be null");
Validate.notNull(theInterceptor, "theInterceptor must not be null");
synchronized (myRegistryMutex) {
myAnonymousInvokers.put(thePointcut, theInvoker);
if (!isInterceptorAlreadyRegistered(theInterceptor)) {
@ -179,9 +185,9 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
}
private void unregisterInterceptorsIf(
Predicate<Object> theShouldUnregisterFunction, ListMultimap<POINTCUT, BaseInvoker> theGlobalInvokers) {
Predicate<Object> theShouldUnregisterFunction, ListMultimap<POINTCUT, IInvoker> theGlobalInvokers) {
synchronized (myRegistryMutex) {
for (Map.Entry<POINTCUT, BaseInvoker> nextInvoker : new ArrayList<>(theGlobalInvokers.entries())) {
for (Map.Entry<POINTCUT, IInvoker> nextInvoker : new ArrayList<>(theGlobalInvokers.entries())) {
if (theShouldUnregisterFunction.test(nextInvoker.getValue().getInterceptor())) {
unregisterInterceptor(nextInvoker.getValue().getInterceptor());
}
@ -265,7 +271,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
assert haveAppropriateParams(thePointcut, theParams);
assert thePointcut.getReturnType() != void.class;
return doCallHooks(thePointcut, theParams, null);
return doCallHooks(thePointcut, theParams);
}
@Override
@ -282,116 +288,47 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
assert haveAppropriateParams(thePointcut, theParams);
assert thePointcut.getReturnType() == void.class || thePointcut.getReturnType() == getBooleanReturnType();
Object retValObj = doCallHooks(thePointcut, theParams, true);
Object retValObj = doCallHooks(thePointcut, theParams);
retValObj = defaultIfNull(retValObj, true);
return (Boolean) retValObj;
}
private Object doCallHooks(POINTCUT thePointcut, HookParams theParams, Object theRetVal) {
// use new list for loop to avoid ConcurrentModificationException in case invoker gets added while looping
List<BaseInvoker> invokers = new ArrayList<>(getInvokersForPointcut(thePointcut));
/*
* Call each hook in order
*/
for (BaseInvoker nextInvoker : invokers) {
Object nextOutcome = nextInvoker.invoke(theParams);
Class<?> pointcutReturnType = thePointcut.getReturnType();
if (pointcutReturnType.equals(getBooleanReturnType())) {
Boolean nextOutcomeAsBoolean = (Boolean) nextOutcome;
if (Boolean.FALSE.equals(nextOutcomeAsBoolean)) {
ourLog.trace("callHooks({}) for invoker({}) returned false", thePointcut, nextInvoker);
theRetVal = false;
break;
} else {
theRetVal = true;
}
} else if (!pointcutReturnType.equals(void.class)) {
if (nextOutcome != null) {
theRetVal = nextOutcome;
break;
}
}
}
return theRetVal;
private Object doCallHooks(POINTCUT thePointcut, HookParams theParams) {
List<IInvoker> invokers = getInvokersForPointcut(thePointcut);
return callInvokers(thePointcut, theParams, invokers);
}
@VisibleForTesting
List<Object> getInterceptorsWithInvokersForPointcut(POINTCUT thePointcut) {
return getInvokersForPointcut(thePointcut).stream()
.map(BaseInvoker::getInterceptor)
.map(IInvoker::getInterceptor)
.collect(Collectors.toList());
}
/**
* Returns an ordered list of invokers for the given pointcut. Note that
* a new and stable list is returned to.. do whatever you want with it.
* Returns a list of all invokers registered for the given pointcut. The list
* is ordered by the invoker order (as specified by the {@link Interceptor#order()}
* and {@link Hook#order()} values).
*
* @return The list returned by this method will always be a newly created list, so it will be stable and can be modified.
*/
private List<BaseInvoker> getInvokersForPointcut(POINTCUT thePointcut) {
List<BaseInvoker> invokers;
@Override
public List<IInvoker> getInvokersForPointcut(POINTCUT thePointcut) {
List<IInvoker> invokers;
synchronized (myRegistryMutex) {
List<BaseInvoker> globalInvokers = myGlobalInvokers.get(thePointcut);
List<BaseInvoker> anonymousInvokers = myAnonymousInvokers.get(thePointcut);
List<BaseInvoker> threadLocalInvokers = null;
invokers = union(globalInvokers, anonymousInvokers, threadLocalInvokers);
List<IInvoker> globalInvokers = myGlobalInvokers.get(thePointcut);
List<IInvoker> anonymousInvokers = myAnonymousInvokers.get(thePointcut);
invokers = union(Arrays.asList(globalInvokers, anonymousInvokers));
}
return invokers;
}
/**
* First argument must be the global invoker list!!
*/
@SafeVarargs
private List<BaseInvoker> union(List<BaseInvoker>... theInvokersLists) {
List<BaseInvoker> haveOne = null;
boolean haveMultiple = false;
for (List<BaseInvoker> nextInvokerList : theInvokersLists) {
if (nextInvokerList == null || nextInvokerList.isEmpty()) {
continue;
}
if (haveOne == null) {
haveOne = nextInvokerList;
} else {
haveMultiple = true;
}
}
if (haveOne == null) {
return Collections.emptyList();
}
List<BaseInvoker> retVal;
if (!haveMultiple) {
// The global list doesn't need to be sorted every time since it's sorted on
// insertion each time. Doing so is a waste of cycles..
if (haveOne == theInvokersLists[0]) {
retVal = haveOne;
} else {
retVal = new ArrayList<>(haveOne);
retVal.sort(Comparator.naturalOrder());
}
} else {
retVal = Arrays.stream(theInvokersLists)
.filter(Objects::nonNull)
.flatMap(Collection::stream)
.sorted()
.collect(Collectors.toList());
}
return retVal;
}
/**
* Only call this when assertions are enabled, it's expensive
*/
final boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) {
public static boolean haveAppropriateParams(IPointcut thePointcut, HookParams theParams) {
if (theParams.getParamsForType().values().size()
!= thePointcut.getParameterTypes().size()) {
throw new IllegalArgumentException(Msg.code(1909)
@ -430,7 +367,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
}
private List<HookInvoker> scanInterceptorAndAddToInvokerMultimap(
Object theInterceptor, ListMultimap<POINTCUT, BaseInvoker> theInvokers) {
Object theInterceptor, ListMultimap<POINTCUT, IInvoker> theInvokers) {
Class<?> interceptorClass = theInterceptor.getClass();
int typeOrder = determineOrder(interceptorClass);
@ -452,7 +389,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
// Make sure we're always sorted according to the order declared in @Order
for (POINTCUT nextPointcut : theInvokers.keys()) {
List<BaseInvoker> nextInvokerList = theInvokers.get(nextPointcut);
List<IInvoker> nextInvokerList = theInvokers.get(nextPointcut);
nextInvokerList.sort(Comparator.naturalOrder());
}
@ -483,6 +420,108 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
protected abstract Optional<HookDescriptor> scanForHook(Method nextMethod);
public static Object callInvokers(IPointcut thePointcut, HookParams theParams, List<IInvoker> invokers) {
Object retVal = null;
/*
* Call each hook in order
*/
for (IInvoker nextInvoker : invokers) {
Object nextOutcome = nextInvoker.invoke(theParams);
Class<?> pointcutReturnType = thePointcut.getReturnType();
if (pointcutReturnType.equals(thePointcut.getBooleanReturnTypeForEnum())) {
Boolean nextOutcomeAsBoolean = (Boolean) nextOutcome;
if (Boolean.FALSE.equals(nextOutcomeAsBoolean)) {
ourLog.trace("callHooks({}) for invoker({}) returned false", thePointcut, nextInvoker);
retVal = false;
break;
} else {
retVal = true;
}
} else if (!pointcutReturnType.equals(void.class)) {
if (nextOutcome != null) {
retVal = nextOutcome;
break;
}
}
}
return retVal;
}
/**
* First argument must be the global invoker list!!
*/
public static List<IInvoker> union(List<List<IInvoker>> theInvokersLists) {
List<IInvoker> haveOne = null;
boolean haveMultiple = false;
for (List<IInvoker> nextInvokerList : theInvokersLists) {
if (nextInvokerList == null || nextInvokerList.isEmpty()) {
continue;
}
if (haveOne == null) {
haveOne = nextInvokerList;
} else {
haveMultiple = true;
}
}
if (haveOne == null) {
return Collections.emptyList();
}
List<IInvoker> retVal;
if (!haveMultiple) {
// The global list doesn't need to be sorted every time since it's sorted on
// insertion each time. Doing so is a waste of cycles..
if (haveOne == theInvokersLists.get(0)) {
retVal = haveOne;
} else {
retVal = new ArrayList<>(haveOne);
retVal.sort(Comparator.naturalOrder());
}
} else {
int totalSize = 0;
for (List<IInvoker> list : theInvokersLists) {
totalSize += list.size();
}
retVal = new ArrayList<>(totalSize);
for (List<IInvoker> list : theInvokersLists) {
retVal.addAll(list);
}
retVal.sort(Comparator.naturalOrder());
}
return retVal;
}
protected static <T extends Annotation> Optional<T> findAnnotation(
AnnotatedElement theObject, Class<T> theHookClass) {
T annotation;
if (theObject instanceof Method) {
annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
} else {
annotation = theObject.getAnnotation(theHookClass);
}
return Optional.ofNullable(annotation);
}
private static int determineOrder(Class<?> theInterceptorClass) {
return findAnnotation(theInterceptorClass, Interceptor.class)
.map(Interceptor::order)
.orElse(Interceptor.DEFAULT_ORDER);
}
private static String toErrorString(List<String> theParameterTypes) {
return theParameterTypes.stream().sorted().collect(Collectors.joining(","));
}
private class HookInvoker extends BaseInvoker {
private final Method myMethod;
@ -501,10 +540,11 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
myMethod = theHookMethod;
Class<?> returnType = theHookMethod.getReturnType();
if (myPointcut.getReturnType().equals(getBooleanReturnType())) {
if (myPointcut.getReturnType().equals(myPointcut.getBooleanReturnTypeForEnum())) {
Validate.isTrue(
getBooleanReturnType().equals(returnType) || void.class.equals(returnType),
"Method does not return boolean or void: %s",
myPointcut.getBooleanReturnTypeForEnum().equals(returnType) || void.class.equals(returnType),
"Method does not return %s or void: %s",
myPointcut.getBooleanReturnTypeForEnum().getSimpleName(),
theHookMethod);
} else if (myPointcut.getReturnType().equals(void.class)) {
Validate.isTrue(void.class.equals(returnType), "Method does not return void: %s", theHookMethod);
@ -541,7 +581,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
* @return Returns true/false if the hook method returns a boolean, returns true otherwise
*/
@Override
Object invoke(HookParams theParams) {
public Object invoke(HookParams theParams) {
Object[] args = new Object[myParameterTypes.length];
for (int i = 0; i < myParameterTypes.length; i++) {
@ -610,7 +650,7 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
}
}
protected abstract static class BaseInvoker implements Comparable<BaseInvoker> {
public abstract static class BaseInvoker implements IInvoker {
private final int myOrder;
private final Object myInterceptor;
@ -620,36 +660,19 @@ public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & I
myOrder = theOrder;
}
@Override
public Object getInterceptor() {
return myInterceptor;
}
abstract Object invoke(HookParams theParams);
@Override
public int getOrder() {
return myOrder;
}
@Override
public int compareTo(BaseInvoker theInvoker) {
return myOrder - theInvoker.myOrder;
public int compareTo(IInvoker theInvoker) {
return myOrder - theInvoker.getOrder();
}
}
protected static <T extends Annotation> Optional<T> findAnnotation(
AnnotatedElement theObject, Class<T> theHookClass) {
T annotation;
if (theObject instanceof Method) {
annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
} else {
annotation = theObject.getAnnotation(theHookClass);
}
return Optional.ofNullable(annotation);
}
private static int determineOrder(Class<?> theInterceptorClass) {
return findAnnotation(theInterceptorClass, Interceptor.class)
.map(Interceptor::order)
.orElse(Interceptor.DEFAULT_ORDER);
}
private static String toErrorString(List<String> theParameterTypes) {
return theParameterTypes.stream().sorted().collect(Collectors.joining(","));
}
}

View File

@ -64,8 +64,8 @@ public class InterceptorService extends BaseInterceptorService<Pointcut>
@Override
public void registerAnonymousInterceptor(Pointcut thePointcut, int theOrder, IAnonymousInterceptor theInterceptor) {
Validate.notNull(thePointcut);
Validate.notNull(theInterceptor);
Validate.notNull(thePointcut, "thePointcut must not be null");
Validate.notNull(theInterceptor, "theInterceptor must not be null");
BaseInvoker invoker = new AnonymousLambdaInvoker(thePointcut, theInterceptor, theOrder);
registerAnonymousInterceptor(thePointcut, theInterceptor, invoker);
}
@ -81,7 +81,7 @@ public class InterceptorService extends BaseInterceptorService<Pointcut>
}
@Override
Object invoke(HookParams theParams) {
public Object invoke(HookParams theParams) {
myHook.invoke(myPointcut, theParams);
return true;
}

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.narrative2;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.util.BundleUtil;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IBaseBundle;
@ -42,7 +43,8 @@ public class NarrativeGeneratorTemplateUtils {
* Given a Bundle as input, are any entries present with a given resource type
*/
public boolean bundleHasEntriesWithResourceType(IBaseBundle theBaseBundle, String theResourceType) {
FhirContext ctx = theBaseBundle.getStructureFhirVersionEnum().newContextCached();
FhirVersionEnum fhirVersionEnum = theBaseBundle.getStructureFhirVersionEnum();
FhirContext ctx = FhirContext.forCached(fhirVersionEnum);
List<Pair<String, IBaseResource>> entryResources =
BundleUtil.getBundleEntryUrlsAndResources(ctx, theBaseBundle);
return entryResources.stream()

View File

@ -342,6 +342,8 @@ public class Constants {
*/
public static final String HIBERNATE_INTEGRATION_ENVERS_ENABLED = "hibernate.integration.envers.enabled";
public static final String OPENTELEMETRY_BASE_NAME = "io.hapifhir";
public static final String PARAM_NEW_REFERENCE_TARGET_ID = "newReferenceTargetId";
static {

View File

@ -33,7 +33,7 @@ public class SleepUtil {
@SuppressWarnings("BusyWait")
public void sleepAtLeast(long theMillis, boolean theLogProgress) {
long start = System.currentTimeMillis();
while (System.currentTimeMillis() <= start + theMillis) {
while (System.currentTimeMillis() < start + theMillis) {
try {
long timeSinceStarted = System.currentTimeMillis() - start;
long timeToSleep = Math.max(0, theMillis - timeSinceStarted);

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -668,7 +668,7 @@ public abstract class BaseCommand implements Comparable<BaseCommand> {
protected void parseFhirContext(CommandLine theCommandLine) throws ParseException {
FhirVersionEnum versionEnum = parseFhirVersion(theCommandLine);
myFhirCtx = versionEnum.newContext();
myFhirCtx = FhirContext.forVersion(versionEnum);
}
public abstract void run(CommandLine theCommandLine) throws ParseException, ExecutionException;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -98,7 +98,7 @@ public class VersionCanonicalizer {
private final FhirContext myContext;
public VersionCanonicalizer(FhirVersionEnum theTargetVersion) {
this(theTargetVersion.newContextCached());
this(FhirContext.forCached(theTargetVersion));
}
public VersionCanonicalizer(FhirContext theTargetContext) {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -40,7 +40,6 @@ import jakarta.annotation.Nonnull;
import jakarta.servlet.ServletException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.NpmPackageValidationSupport;
@ -343,7 +342,8 @@ public class ValidatorExamples {
// START SNIPPET: validateSupplyProfiles
FhirContext ctx = FhirContext.forR4();
// Create a chain that will hold our modules
// Create a chain that will hold our modules and cache the
// values they supply
ValidationSupportChain supportChain = new ValidationSupportChain();
// DefaultProfileValidationSupport supplies base FHIR definitions. This is generally required
@ -368,12 +368,9 @@ public class ValidatorExamples {
// Add the custom definitions to the chain
supportChain.addValidationSupport(prePopulatedSupport);
// Wrap the chain in a cache to improve performance
CachingValidationSupport cache = new CachingValidationSupport(supportChain);
// Create a validator using the FhirInstanceValidator module. We can use this
// validator to perform validation
FhirInstanceValidator validatorModule = new FhirInstanceValidator(cache);
FhirInstanceValidator validatorModule = new FhirInstanceValidator(supportChain);
FhirValidator validator = ctx.newValidator().registerValidatorModule(validatorModule);
ValidationResult result = validator.validateWithResult(input);
// END SNIPPET: validateSupplyProfiles
@ -403,12 +400,9 @@ public class ValidatorExamples {
remoteTermSvc.setBaseUrl("http://hapi.fhir.org/baseR4");
supportChain.addValidationSupport(remoteTermSvc);
// Wrap the chain in a cache to improve performance
CachingValidationSupport cache = new CachingValidationSupport(supportChain);
// Create a validator using the FhirInstanceValidator module. We can use this
// validator to perform validation
FhirInstanceValidator validatorModule = new FhirInstanceValidator(cache);
FhirInstanceValidator validatorModule = new FhirInstanceValidator(supportChain);
FhirValidator validator = ctx.newValidator().registerValidatorModule(validatorModule);
ValidationResult result = validator.validateWithResult(input);
// END SNIPPET: validateUsingRemoteTermSvr
@ -462,12 +456,11 @@ public class ValidatorExamples {
new CommonCodeSystemsTerminologyService(ctx),
new InMemoryTerminologyServerValidationSupport(ctx),
new SnapshotGeneratingValidationSupport(ctx));
CachingValidationSupport validationSupport = new CachingValidationSupport(validationSupportChain);
// Create a validator. Note that for good performance you can create as many validator objects
// as you like, but you should reuse the same validation support object in all of them.
FhirValidator validator = ctx.newValidator();
FhirInstanceValidator instanceValidator = new FhirInstanceValidator(validationSupport);
FhirInstanceValidator instanceValidator = new FhirInstanceValidator(validationSupportChain);
validator.registerValidatorModule(instanceValidator);
// Create a test patient to validate

View File

@ -0,0 +1,8 @@
---
type: fix
issue: 6404
title: "Searches using fulltext search that combined `_lastUpdated` query parameter with
any other (supported) fulltext query parameter would find no matches, even
if matches existed.
This has been corrected.
"

View File

@ -0,0 +1,13 @@
---
type: perf
issue: 6508
title: "The ValidationSupportChain module has been rewritten to improve validator performance. This change:
* Adds new caching capabilities to ValidationSupportChain. This is an improvement over the previous separate caching module because the chain can now remember which entries in the chain responded affirmatively to `isValueSetSupported()` and will therefore be more efficient about trying entries in the chain. It also makes debugging much less confusing, as there is less recursion and the caches don't use a LoadingCache.
* Importantly, the caching in ValidationSupportChain caches negative lookups (i.e. items that could not be found by URL) as well as positive lookups. This is a change from the historical caching behaviour.
* Changes ValidationSupportChain to never expire StructureDefinition entries in the cache, which is needed because the validator makes assumptions about StructureDefinitions never changing. Fixes #6424.
* Modifies `VersionSpecificWorkerContextWrapper` so that it doesn't use a separate cache and instead relies on the caching provided by ValidationSupportChain. This class previously used a cache because it converts arbitrary versions of FHIR StructureDefinitions into the canonical version required by the validator (R5), but these converted versions are now stored in the userdata map of objects returned by and cached by ValidationSupportChain. This makes the caching more predictable since there is only one cache to track.
* Adds OpenTelemetry support to ValidationSupportChain, with metrics for tracking the cache size.
* Deprecates CachingValidationSupport since caching is now provided by ValidationSupportChain. CachingValidationSupport is now just a passthrough and should be removed from applications. It will be removed from the library in a future release.
* Removes ConceptMap caching from TermReadSvcImpl, as this caching is both redundant and inefficient, since it operates within a database transaction.
These changes result in very significant performance improvements when performing validation in the JPA server. Throughput improvements of 1000% have been recorded in benchmarking use cases involving large profiles with remote terminology services enabled. Many other validation use cases should see significant improvements as well."
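
The deprecation of CachingValidationSupport implies the following migration in application code, mirroring the documentation snippet changes earlier in this commit (an illustrative before/after, with `supportChain` standing in for an existing ValidationSupportChain):

// Before: the chain was wrapped in a cache
// CachingValidationSupport cache = new CachingValidationSupport(supportChain);
// FhirInstanceValidator validatorModule = new FhirInstanceValidator(cache);

// After: the chain caches internally and is used directly
FhirInstanceValidator validatorModule = new FhirInstanceValidator(supportChain);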

View File

@ -0,0 +1,8 @@
---
type: add
issue: 6511
title: "Interceptors can be defined against the registry on the RestfulServer, or on
the registry in the JPA repository. Because these are separate registries, the order()
attribute on the Hook annotation was not previously respected across the two
registries. The CompositeInterceptorRegistry has been reworked so that ordering is
respected across both registries."
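
As an illustrative sketch (variable names and the chosen pointcut are hypothetical, not from the commit), the static helpers added to BaseInterceptorService in this commit allow invoker lists from two registries to be merged into a single, correctly ordered list:

InterceptorService serverRegistry = new InterceptorService();
InterceptorService jpaRegistry = new InterceptorService();
// ... interceptors are registered on each registry elsewhere ...

Pointcut pointcut = Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED;
List<IBaseInterceptorBroadcaster.IInvoker> merged = BaseInterceptorService.union(Arrays.asList(
    serverRegistry.getInvokersForPointcut(pointcut),  // the global (server) list must come first
    jpaRegistry.getInvokersForPointcut(pointcut)));

// The merged list is sorted by order; BaseInterceptorService.callInvokers(pointcut, params, merged)
// then runs the hooks in sequence, given a HookParams built for this pointcut's parameter types.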

View File

@ -0,0 +1,4 @@
---
type: remove
issue: 6512
title: "The methods on FhirVersionEnum which produces a FhirContext (newContext() ,and newContextCached()) have been deprecated, and will be removed."

View File

@ -0,0 +1,4 @@
# Fulltext Search with _lastUpdated Filter
Fulltext searches have been updated to support the `_lastUpdated` search parameter. A reindexing of Search Parameters
is required to migrate old data to support the `_lastUpdated` search parameter.

View File

@ -16,6 +16,21 @@ There are several implementations of the [IValidationSupport](/hapi-fhir/apido
This module can be used to combine multiple implementations together so that for every request, each support class instance in the chain is tried in sequence. Note that nearly all methods in the [IValidationSupport](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/support/IValidationSupport.html) interface are permitted to return `null` if they are not able to service a particular method call. So for example, if a call to the [`validateCode`](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/support/IValidationSupport.html#validateCode(ca.uhn.fhir.context.support.ValidationSupportContext,ca.uhn.fhir.context.support.ConceptValidationOptions,java.lang.String,java.lang.String,java.lang.String,java.lang.String)) method is made, the validator will try each module in the chain until one of them returns a non-null response.
The following chaining logic is used:
* Calls to `fetchAll...` methods such as `fetchAllConformanceResources()` and `fetchAllStructureDefinitions()` will call every method in the chain in order, and aggregate the results into a single list to return.
* Calls to fetch or validate codes, such as `validateCode(...)` and `lookupCode(...)`, will first test each module in the chain using the `isCodeSystemSupported(...)` or `isValueSetSupported(...)` methods (depending on whether a ValueSet URL is present in the method parameters), and will invoke the method only on modules which report that they can handle the given CodeSystem/ValueSet URL. The first non-null value returned by a module that supports the URL will be returned to the caller.
* All other methods will invoke the corresponding method on each module in the chain in order, and will return as soon as a non-null value is returned.
The following caching logic is used if caching is enabled using `CacheConfiguration`. You can use `CacheConfiguration.disabled()` if you want to disable caching.
* Calls to fetch StructureDefinitions, including `fetchAllStructureDefinitions()` and `fetchStructureDefinition(...)`, are cached in a non-expiring cache. This is because the `FhirInstanceValidator` module assumes, for performance reasons, that these objects will not change for the lifetime of the validator.
* Calls to all other `fetchAll...` methods including `fetchAllConformanceResources()` and `fetchAllSearchParameters()` cache their results in an expiring cache, but will refresh that cache asynchronously.
* Results of `generateSnapshot(...)` are not cached, as this method is generally called in contexts where the results are already cached by the caller.
* Results of all other methods are stored in an expiring cache.
Note that caching functionality used to be provided by a separate provider called {@literal CachingValidationSupport} but that functionality has been moved into this class as of HAPI FHIR 8.0.0, because it is possible to provide a more efficient chain when these functions are combined.
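
For illustration, a chain might be constructed with the default cache settings or with caching disabled as follows (a sketch; the constructor accepting a CacheConfiguration together with the modules is an assumption based on the configuration shown elsewhere in this commit):

FhirContext ctx = FhirContext.forR4();

// Default caching behaviour
ValidationSupportChain chain = new ValidationSupportChain(
    ValidationSupportChain.CacheConfiguration.defaultValues(),
    new DefaultProfileValidationSupport(ctx),
    new InMemoryTerminologyServerValidationSupport(ctx),
    new CommonCodeSystemsTerminologyService(ctx));

// Caching disabled
ValidationSupportChain uncachedChain = new ValidationSupportChain(
    ValidationSupportChain.CacheConfiguration.disabled(),
    new DefaultProfileValidationSupport(ctx));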
# DefaultProfileValidationSupport
[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.html) / [Source](https://github.com/hapifhir/hapi-fhir/blob/master/hapi-fhir-base/src/main/java/ca/uhn/fhir/context/support/DefaultProfileValidationSupport.java)
@ -44,12 +59,6 @@ This module contains a series of HashMaps that store loaded conformance resource
This module can be used to load FHIR NPM Packages and supply the conformance resources within them to the validator. See [Validating Using Packages](./instance_validator.html#packages) for an example of how to use this module.
# CachingValidationSupport
[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.html) / [Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java)
This module caches results of calls to a wrapped service implementation for a period of time. This class can be a significant help in terms of performance if you are loading conformance resources or performing terminology operations from a database or disk, but it also has value even for purely in-memory validation since validating codes against a ValueSet can require the expansion of that ValueSet.
# SnapshotGeneratingValidationSupport
[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.html) / [Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/SnapshotGeneratingValidationSupport.java)
@ -161,6 +170,12 @@ This validation support module may be placed at the end of a ValidationSupportCh
Note that this module must also be activated by calling [setAllowNonExistentCodeSystem(true)](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/UnknownCodeSystemWarningValidationSupport.html#setAllowNonExistentCodeSystem(boolean)) in order to specify that unknown code systems should be allowed.
# CachingValidationSupport
[JavaDoc](/hapi-fhir/apidocs/hapi-fhir-validation/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.html) / [Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/CachingValidationSupport.java)
This module is deprecated and no longer provides any functionality. Caching is provided by [ValidationSupportChain](#validationsupportchain).
# Recipes

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkExportProcessor;
@ -349,10 +348,9 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
* Get a ISearchBuilder for the given resource type.
*/
protected ISearchBuilder<JpaPid> getSearchBuilderForResourceType(String theResourceType) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceType);
RuntimeResourceDefinition def = myContext.getResourceDefinition(theResourceType);
Class<? extends IBaseResource> typeClass = def.getImplementingClass();
return mySearchBuilderFactory.newSearchBuilder(dao, theResourceType, typeClass);
return mySearchBuilderFactory.newSearchBuilder(theResourceType, typeClass);
}
protected RuntimeSearchParam getPatientSearchParamForCurrentResourceType(String theResourceType) {

View File

@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.api.IDaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.config.util.ResourceCountCacheUtil;
import ca.uhn.fhir.jpa.config.util.ValidationSupportConfigUtil;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.search.HSearchSortHelperImpl;
@ -32,15 +31,12 @@ import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.StaleSearchDeletingSvcImpl;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import ca.uhn.fhir.rest.api.IResourceSupportedSvc;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
@Configuration
@Import({JpaConfig.class})
@ -64,12 +60,6 @@ public class HapiJpaConfig {
return new StaleSearchDeletingSvcImpl();
}
@Primary
@Bean
public CachingValidationSupport validationSupportChain(JpaValidationSupportChain theJpaValidationSupportChain) {
return ValidationSupportConfigUtil.newCachingValidationSupport(theJpaValidationSupportChain);
}
@Bean
public DatabaseBackedPagingProvider databaseBackedPagingProvider() {
return new DatabaseBackedPagingProvider();

View File

@ -170,6 +170,7 @@ import ca.uhn.fhir.jpa.term.config.TermCodeSystemConfig;
import ca.uhn.fhir.jpa.util.JpaHapiTransactionService;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.PersistenceContextProvider;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl;
import ca.uhn.fhir.jpa.validation.ValidationSettings;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
@ -187,6 +188,7 @@ import ca.uhn.fhir.util.MetaTagSorterAlphabetical;
import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
import jakarta.annotation.Nullable;
import org.hl7.fhir.common.hapi.validation.support.UnknownCodeSystemWarningValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.hl7.fhir.utilities.graphql.IGraphQLStorageServices;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
@ -232,11 +234,26 @@ public class JpaConfig {
public static final String PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER =
"PersistedJpaSearchFirstPageBundleProvider";
public static final String HISTORY_BUILDER = "HistoryBuilder";
public static final String DEFAULT_PROFILE_VALIDATION_SUPPORT = "myDefaultProfileValidationSupport";
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
@Autowired
public JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myFhirContext;
@Bean
public ValidationSupportChain.CacheConfiguration validationSupportChainCacheConfiguration() {
return ValidationSupportChain.CacheConfiguration.defaultValues();
}
@Bean(name = JpaConfig.JPA_VALIDATION_SUPPORT_CHAIN)
@Primary
public IValidationSupport jpaValidationSupportChain() {
return new JpaValidationSupportChain(myFhirContext, validationSupportChainCacheConfiguration());
}
@Bean("myDaoRegistry")
public DaoRegistry daoRegistry() {
return new DaoRegistry();

View File

@ -25,7 +25,6 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
@ -158,10 +157,8 @@ public class SearchConfig {
@Bean(name = ISearchBuilder.SEARCH_BUILDER_BEAN_NAME)
@Scope("prototype")
public ISearchBuilder newSearchBuilder(
IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
public ISearchBuilder newSearchBuilder(String theResourceName, Class<? extends IBaseResource> theResourceType) {
return new SearchBuilder(
theDao,
theResourceName,
myStorageSettings,
myEntityManagerFactory,

View File

@ -20,31 +20,31 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.support.DefaultProfileValidationSupport;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import ca.uhn.fhir.jpa.validation.ValidatorPolicyAdvisor;
import ca.uhn.fhir.jpa.validation.ValidatorResourceFetcher;
import ca.uhn.fhir.validation.IInstanceValidatorModule;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.common.hapi.validation.validator.HapiToHl7OrgDstu2ValidatingSupportWrapper;
import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
@Configuration
public class ValidationSupportConfig {
@Bean(name = "myDefaultProfileValidationSupport")
public DefaultProfileValidationSupport defaultProfileValidationSupport(FhirContext theFhirContext) {
return new DefaultProfileValidationSupport(theFhirContext);
@Autowired
private FhirContext myFhirContext;
@Bean(name = JpaConfig.DEFAULT_PROFILE_VALIDATION_SUPPORT)
public DefaultProfileValidationSupport defaultProfileValidationSupport() {
return new DefaultProfileValidationSupport(myFhirContext);
}
@Bean
@ -56,11 +56,6 @@ public class ValidationSupportConfig {
return retVal;
}
@Bean(name = JpaConfig.JPA_VALIDATION_SUPPORT_CHAIN)
public JpaValidationSupportChain jpaValidationSupportChain(FhirContext theFhirContext) {
return new JpaValidationSupportChain(theFhirContext);
}
@Bean(name = JpaConfig.JPA_VALIDATION_SUPPORT)
public IValidationSupport jpaValidationSupport(FhirContext theFhirContext) {
return new JpaPersistedResourceValidationSupport(theFhirContext);
@ -68,26 +63,13 @@ public class ValidationSupportConfig {
@Bean(name = "myInstanceValidator")
public IInstanceValidatorModule instanceValidator(
FhirContext theFhirContext,
CachingValidationSupport theCachingValidationSupport,
ValidationSupportChain theValidationSupportChain,
IValidationSupport theValidationSupport,
DaoRegistry theDaoRegistry) {
if (theFhirContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3)) {
FhirInstanceValidator val = new FhirInstanceValidator(theCachingValidationSupport);
val.setValidatorResourceFetcher(
jpaValidatorResourceFetcher(theFhirContext, theValidationSupport, theDaoRegistry));
val.setValidatorPolicyAdvisor(jpaValidatorPolicyAdvisor());
val.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning);
val.setValidationSupport(theCachingValidationSupport);
return val;
} else {
CachingValidationSupport cachingValidationSupport = new CachingValidationSupport(
new HapiToHl7OrgDstu2ValidatingSupportWrapper(theValidationSupportChain));
FhirInstanceValidator retVal = new FhirInstanceValidator(cachingValidationSupport);
retVal.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning);
return retVal;
}
FhirContext theFhirContext, IValidationSupport theValidationSupportChain, DaoRegistry theDaoRegistry) {
FhirInstanceValidator val = new FhirInstanceValidator(theValidationSupportChain);
val.setValidatorResourceFetcher(
jpaValidatorResourceFetcher(theFhirContext, theValidationSupportChain, theDaoRegistry));
val.setValidatorPolicyAdvisor(jpaValidatorPolicyAdvisor());
val.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning);
return val;
}
@Bean
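
A hedged sketch of how the simplified instanceValidator bean above can be consumed: FhirInstanceValidator now receives the (already caching) validation support chain directly, with no per-version CachingValidationSupport branch. Only the FhirInstanceValidator and BestPracticeWarningLevel calls come from the diff; the wrapper method and the FhirValidator registration are illustrative.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.validation.FhirValidator;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.r5.utils.validation.constants.BestPracticeWarningLevel;

public class InstanceValidatorSketch {
    public static FhirValidator buildValidator(FhirContext theContext, IValidationSupport theChain) {
        // The chain handles caching internally, so it is passed straight to the module.
        FhirInstanceValidator module = new FhirInstanceValidator(theChain);
        module.setBestPracticeWarningLevel(BestPracticeWarningLevel.Warning);

        FhirValidator validator = theContext.newValidator();
        validator.registerValidatorModule(module);
        return validator;
    }
}
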

View File

@ -1,43 +0,0 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.config.util;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
public final class ValidationSupportConfigUtil {
private ValidationSupportConfigUtil() {}
public static CachingValidationSupport newCachingValidationSupport(
JpaValidationSupportChain theJpaValidationSupportChain) {
return newCachingValidationSupport(theJpaValidationSupportChain, false);
}
public static CachingValidationSupport newCachingValidationSupport(
JpaValidationSupportChain theJpaValidationSupportChain,
boolean theIsEnabledValidationForCodingsLogicalAnd) {
// Short timeout for code translation because TermConceptMappingSvcImpl has its own caching
CachingValidationSupport.CacheTimeouts cacheTimeouts =
CachingValidationSupport.CacheTimeouts.defaultValues().setTranslateCodeMillis(1000);
return new CachingValidationSupport(
theJpaValidationSupportChain, cacheTimeouts, theIsEnabledValidationForCodingsLogicalAnd);
}
}

View File

@ -1057,8 +1057,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (!presenceCount.isEmpty()) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage(
"For " + entity.getIdDt().toUnqualifiedVersionless().getValue() + " added "
@ -1068,8 +1069,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
}
}
}
@ -1092,8 +1092,10 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
// Interceptor broadcast: JPA_PERFTRACE_INFO
if (!searchParamAddRemoveCount.isEmpty()) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(
myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage("For "
+ entity.getIdDt().toUnqualifiedVersionless().getValue() + " added "
@ -1104,8 +1106,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
}
}
}
@ -1688,7 +1689,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
if (myStorageSettings.isAdvancedHSearchIndexing()) {
ExtendedHSearchIndexData hSearchIndexData =
myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
myFulltextSearchSvc.extractLuceneIndexData(theResource, theEntity, theNewParams);
theEntity.setLuceneIndexData(hSearchIndexData);
}
}
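
A minimal sketch of the interceptor-broadcast pattern this refactor converges on throughout the JPA module: build one request-aware composite broadcaster, guard with hasHooks(), then callHooks(). The broadcaster, HookParams and StorageProcessingMessage calls mirror the diff; the import paths and the surrounding helper method are assumptions.

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;

public class PerfTraceBroadcastSketch {
    static void firePerfTraceInfo(
            IInterceptorBroadcaster theModuleBroadcaster, RequestDetails theRequest, String theMessage) {
        // One composite instance per request replaces the old static doCallHooks()/hasHooks() helpers.
        IInterceptorBroadcaster compositeBroadcaster =
                CompositeInterceptorBroadcaster.newCompositeBroadcaster(theModuleBroadcaster, theRequest);

        // Building HookParams is skipped entirely when no hook is registered for the pointcut.
        if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
            StorageProcessingMessage message = new StorageProcessingMessage();
            message.setMessage(theMessage);
            HookParams params = new HookParams()
                    .add(RequestDetails.class, theRequest)
                    .addIfMatchesType(ServletRequestDetails.class, theRequest)
                    .add(StorageProcessingMessage.class, message);
            compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
        }
    }
}
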

View File

@ -227,15 +227,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Nullable
public static <T extends IBaseResource> T invokeStoragePreShowResources(
IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequest, T retVal) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.STORAGE_PRESHOW_RESOURCES, theInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(theInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES)) {
SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(retVal);
HookParams params = new HookParams()
.add(IPreResourceShowDetails.class, showDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(
theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, params);
//noinspection unchecked
retVal = (T) showDetails.getResource(
0); // TODO GGG/JA : getting resource 0 is interesting. We apparently allow null values in the list.
@ -251,15 +251,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
RequestDetails theRequest,
IIdType theId,
IBaseResource theResource) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.STORAGE_PREACCESS_RESOURCES, theInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(theInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(theResource);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(
theInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
if (accessDetails.isDontReturnResourceAtIndex(0)) {
throw new ResourceNotFoundException(Msg.code(1995) + "Resource " + theId + " is not known");
}
@ -1585,15 +1585,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
private Optional<T> invokeStoragePreAccessResources(RequestDetails theRequest, T theResource) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(theResource);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
if (accessDetails.isDontReturnResourceAtIndex(0)) {
return Optional.empty();
}
@ -2103,7 +2103,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
ISearchBuilder<JpaPid> builder =
mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
mySearchBuilderFactory.newSearchBuilder(getResourceName(), getResourceType());
List<JpaPid> ids = new ArrayList<>();
@ -2136,8 +2136,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(
theRequest, myResourceName, theParams, theConditionalOperationTargetOrNull);
ISearchBuilder<JpaPid> builder =
mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
ISearchBuilder<JpaPid> builder = mySearchBuilderFactory.newSearchBuilder(getResourceName(), getResourceType());
String uuid = UUID.randomUUID().toString();
@ -2175,7 +2174,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
.withPropagation(Propagation.REQUIRED)
.searchList(() -> {
ISearchBuilder<JpaPid> builder =
mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
mySearchBuilderFactory.newSearchBuilder(getResourceName(), getResourceType());
Stream<JpaPid> pidStream =
builder.createQueryStream(theParams, searchRuntimeDetails, theRequest, requestPartitionId);
@ -2191,7 +2190,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Nonnull
private Stream<T> pidsToResource(RequestDetails theRequest, Stream<JpaPid> pidStream) {
ISearchBuilder<JpaPid> searchBuilder =
mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
mySearchBuilderFactory.newSearchBuilder(getResourceName(), getResourceType());
@SuppressWarnings("unchecked")
Stream<T> resourceStream = (Stream<T>) new QueryChunker<>()
.chunk(pidStream, SearchBuilder.getMaximumPageSize())

View File

@ -135,13 +135,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Override
public ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
String resourceType = myFhirContext.getResourceType(theResource);
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(
resourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
return extractor.extract(theResource, theNewParams);
return extractor.extract(theResource, theEntity, theNewParams);
}
@Override
@ -516,8 +516,9 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
*/
@SuppressWarnings("rawtypes")
private void logQuery(SearchQueryOptionsStep theQuery, RequestDetails theRequestDetails) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_INFO, myInterceptorBroadcaster, theRequestDetails)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
StorageProcessingMessage storageProcessingMessage = new StorageProcessingMessage();
String queryString = theQuery.toQuery().queryString();
storageProcessingMessage.setMessage(queryString);
@ -525,8 +526,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, storageProcessingMessage);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_INFO, params);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
}
}

View File

@ -48,6 +48,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -243,8 +246,23 @@ public class HistoryBuilder {
Subquery<Date> pastDateSubQuery = theQuery.subquery(Date.class);
Root<ResourceHistoryTable> subQueryResourceHistory = pastDateSubQuery.from(ResourceHistoryTable.class);
Expression myUpdatedMostRecent = theCriteriaBuilder.max(subQueryResourceHistory.get("myUpdated"));
/*
* This conversion from the Date in myRangeEndInclusive into a ZonedDateTime is an experiment -
* There is an intermittent test failure in testSearchHistoryWithAtAndGtParameters() that I can't
* figure out. But I've added a ton of logging to the error it fails with and I noticed that
* we emit SQL along the lines of
* select coalesce(max(rht2_0.RES_UPDATED), timestamp with time zone '2024-10-05 18:24:48.172000000Z')
* for this date, and all other dates are in GMT so this is an experiment. If nothing changes,
* we can roll this back to
* theCriteriaBuilder.literal(myRangeStartInclusive)
* JA 20241005
*/
ZonedDateTime rangeStart =
ZonedDateTime.ofInstant(Instant.ofEpochMilli(myRangeStartInclusive.getTime()), ZoneId.of("GMT"));
Expression myUpdatedMostRecentOrDefault =
theCriteriaBuilder.coalesce(myUpdatedMostRecent, theCriteriaBuilder.literal(myRangeStartInclusive));
theCriteriaBuilder.coalesce(myUpdatedMostRecent, theCriteriaBuilder.literal(rangeStart));
pastDateSubQuery
.select(myUpdatedMostRecentOrDefault)
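
A tiny illustration of the timezone normalization the comment above describes: the coalesce() default is now supplied as a GMT ZonedDateTime instead of a raw java.util.Date, so the generated SQL literal carries the same zone as the other bound dates. The conversion line matches the diff; the standalone main() and printed output are illustrative only.

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Date;

public class HistoryRangeStartSketch {
    public static void main(String[] args) {
        Date rangeStartInclusive = new Date(); // stand-in for myRangeStartInclusive

        // Same conversion as in the diff: epoch millis pinned to GMT.
        ZonedDateTime rangeStart = ZonedDateTime.ofInstant(
                Instant.ofEpochMilli(rangeStartInclusive.getTime()), ZoneId.of("GMT"));

        // The criteria builder literal(...) now receives this value, e.g.
        // theCriteriaBuilder.coalesce(myUpdatedMostRecent, theCriteriaBuilder.literal(rangeStart));
        System.out.println("coalesce default would be rendered as: " + rangeStart);
    }
}
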

View File

@ -89,7 +89,7 @@ public interface IFulltextSearchSvc {
boolean isDisabled();
ExtendedHSearchIndexData extractLuceneIndexData(
IBaseResource theResource, ResourceIndexedSearchParams theNewParams);
IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams);
/**
* Returns true if the parameter map can be handled for hibernate search.

View File

@ -164,6 +164,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
@Override
public IBaseResource fetchStructureDefinition(String theUrl) {
assert myStructureDefinitionType != null;
return fetchResource(myStructureDefinitionType, theUrl);
}

View File

@ -124,12 +124,15 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
final AtomicInteger counter = new AtomicInteger();
// Notify Interceptors about pre-action call
HookParams hooks = new HookParams()
.add(AtomicInteger.class, counter)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING)) {
HookParams hooks = new HookParams()
.add(AtomicInteger.class, counter)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks);
}
ourLog.info("BEGINNING GLOBAL $expunge");
Propagation propagation = Propagation.REQUIRES_NEW;

View File

@ -256,8 +256,9 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
ResourceHistoryTable theVersion,
IdDt theId) {
final AtomicInteger counter = new AtomicInteger();
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, myInterceptorBroadcaster, theRequestDetails)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE)) {
IBaseResource resource = myJpaStorageResourceParser.toResource(theVersion, false);
HookParams params = new HookParams()
.add(AtomicInteger.class, counter)
@ -265,8 +266,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
.add(IBaseResource.class, resource)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_EXPUNGE_RESOURCE, params);
}
theRemainingCount.addAndGet(-1 * counter.get());
}

View File

@ -392,7 +392,7 @@ public class ExtendedHSearchClauseBuilder {
/**
* Create date clause from date params. The date lower and upper bounds are taken
* into considertion when generating date query ranges
* into consideration when generating date query ranges
*
* <p>Example 1 ('eq' prefix/empty): <code>http://fhirserver/Observation?date=eq2020</code>
* would generate the following search clause

View File

@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.search.CompositeSearchIndexData;
import ca.uhn.fhir.jpa.model.search.DateSearchIndexData;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
@ -37,6 +39,7 @@ import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import ca.uhn.fhir.util.MetaUtil;
import com.google.common.base.Strings;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.ObjectUtils;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -74,8 +77,10 @@ public class ExtendedHSearchIndexExtractor {
}
@Nonnull
public ExtendedHSearchIndexData extract(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
ExtendedHSearchIndexData retVal = new ExtendedHSearchIndexData(myContext, myJpaStorageSettings, theResource);
public ExtendedHSearchIndexData extract(
IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
ExtendedHSearchIndexData retVal =
new ExtendedHSearchIndexData(myContext, myJpaStorageSettings, theResource, theEntity);
if (myJpaStorageSettings.isStoreResourceInHSearchIndex()) {
retVal.setRawResourceData(myContext.newJsonParser().encodeResourceToString(theResource));
@ -113,11 +118,27 @@ public class ExtendedHSearchIndexExtractor {
.filter(nextParam -> !nextParam.isMissing())
.forEach(nextParam -> retVal.addUriIndexData(nextParam.getParamName(), nextParam.getUri()));
theResource.getMeta().getTag().forEach(tag -> retVal.addTokenIndexData("_tag", tag));
theEntity.getTags().forEach(tag -> {
TagDefinition td = tag.getTag();
theResource.getMeta().getSecurity().forEach(sec -> retVal.addTokenIndexData("_security", sec));
theResource.getMeta().getProfile().forEach(prof -> retVal.addUriIndexData("_profile", prof.getValue()));
IBaseCoding coding = (IBaseCoding) myContext.getVersion().newCodingDt();
coding.setVersion(td.getVersion());
coding.setDisplay(td.getDisplay());
coding.setCode(td.getCode());
coding.setSystem(td.getSystem());
coding.setUserSelected(ObjectUtils.defaultIfNull(td.getUserSelected(), false));
switch (td.getTagType()) {
case TAG:
retVal.addTokenIndexData("_tag", coding);
break;
case PROFILE:
retVal.addUriIndexData("_profile", coding.getCode());
break;
case SECURITY_LABEL:
retVal.addTokenIndexData("_security", coding);
break;
}
});
String source = MetaUtil.getSource(myContext, theResource.getMeta());
if (isNotBlank(source)) {
@ -127,20 +148,14 @@ public class ExtendedHSearchIndexExtractor {
theNewParams.myCompositeParams.forEach(nextParam ->
retVal.addCompositeIndexData(nextParam.getSearchParamName(), buildCompositeIndexData(nextParam)));
if (theResource.getMeta().getLastUpdated() != null) {
int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(
theResource.getMeta().getLastUpdated())
if (theEntity.getUpdated() != null && !theEntity.getUpdated().isEmpty()) {
int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(theEntity.getUpdatedDate())
.intValue();
retVal.addDateIndexData(
"_lastUpdated",
theResource.getMeta().getLastUpdated(),
ordinal,
theResource.getMeta().getLastUpdated(),
ordinal);
"_lastUpdated", theEntity.getUpdatedDate(), ordinal, theEntity.getUpdatedDate(), ordinal);
}
if (!theNewParams.myLinks.isEmpty()) {
// awkwardly, links are indexed by jsonpath, not by search param.
// so we re-build the linkage.
Map<String, List<String>> linkPathToParamName = new HashMap<>();
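
A hedged sketch of the new tag routing above: tags are now read from the ResourceTable entity rather than Resource.meta, rebuilt as codings, and routed to the _tag, _profile, or _security index fields by tag type. The switch arms mirror the diff; the method shape, the String return, and the TagTypeEnum import path are assumptions.

import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;

public class TagRoutingSketch {
    /** Returns the HSearch index field a stored tag definition contributes to. */
    static String indexFieldFor(TagDefinition theTagDefinition) {
        TagTypeEnum tagType = theTagDefinition.getTagType();
        switch (tagType) {
            case TAG:
                return "_tag";      // indexed as a token coding (system|code)
            case PROFILE:
                return "_profile";  // indexed as a URI, using the tag code as the profile URL
            case SECURITY_LABEL:
                return "_security"; // indexed as a token coding
            default:
                throw new IllegalArgumentException("Unexpected tag type: " + tagType);
        }
    }
}
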

View File

@ -28,6 +28,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
@ -89,19 +90,28 @@ public class ExtendedHSearchSearchBuilder {
* be inaccurate and wrong.
*/
public boolean canUseHibernateSearch(
String theResourceType, SearchParameterMap myParams, ISearchParamRegistry theSearchParamRegistry) {
String theResourceType, SearchParameterMap theParams, ISearchParamRegistry theSearchParamRegistry) {
boolean canUseHibernate = false;
ResourceSearchParams resourceActiveSearchParams = theSearchParamRegistry.getActiveSearchParams(
theResourceType, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
for (String paramName : myParams.keySet()) {
for (String paramName : theParams.keySet()) {
// special SearchParam handling:
// _lastUpdated
if (theParams.getLastUpdated() != null) {
canUseHibernate = !illegalForHibernateSearch(Constants.PARAM_LASTUPDATED, resourceActiveSearchParams);
if (!canUseHibernate) {
return false;
}
}
// is this parameter supported?
if (illegalForHibernateSearch(paramName, resourceActiveSearchParams)) {
canUseHibernate = false;
} else {
// are the parameter values supported?
canUseHibernate =
myParams.get(paramName).stream()
theParams.get(paramName).stream()
.flatMap(Collection::stream)
.collect(Collectors.toList())
.stream()
@ -136,6 +146,7 @@ public class ExtendedHSearchSearchBuilder {
// not yet supported in HSearch
myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
&& supportsLastUpdated(myParams)
&& // ???
myParams.entrySet().stream()
.filter(e -> !ourUnsafeSearchParmeters.contains(e.getKey()))
@ -145,6 +156,19 @@ public class ExtendedHSearchSearchBuilder {
.allMatch(this::isParamTypeSupported);
}
private boolean supportsLastUpdated(SearchParameterMap theMap) {
if (theMap.getLastUpdated() == null || theMap.getLastUpdated().isEmpty()) {
return true;
}
DateRangeParam lastUpdated = theMap.getLastUpdated();
return lastUpdated.getLowerBound() != null
&& isParamTypeSupported(lastUpdated.getLowerBound())
&& lastUpdated.getUpperBound() != null
&& isParamTypeSupported(lastUpdated.getUpperBound());
}
/**
* Do we support this query param type+modifier?
* <p>
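
A hedged sketch of the _lastUpdated gate added above in supportsLastUpdated(): Hibernate Search is only used when the range carries both a lower and an upper bound, otherwise the query falls back to the JPA path. The DateRangeParam accessors come from the diff; the standalone method is illustrative and the per-bound type check is simplified to a null test here.

import ca.uhn.fhir.rest.param.DateRangeParam;

public class LastUpdatedGateSketch {
    static boolean supportsLastUpdated(DateRangeParam theLastUpdated) {
        if (theLastUpdated == null || theLastUpdated.isEmpty()) {
            // No _lastUpdated filter at all: nothing stops HSearch from handling the query.
            return true;
        }
        // Open-ended ranges (only gt/ge or only lt/le) are not handled by HSearch.
        return theLastUpdated.getLowerBound() != null && theLastUpdated.getUpperBound() != null;
    }
}
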

View File

@ -106,13 +106,15 @@ public class DeleteConflictService {
}
// Notify Interceptors about pre-action call
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
HookParams hooks = new HookParams()
.add(DeleteConflictList.class, theDeleteConflicts)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(TransactionDetails.class, theTransactionDetails);
return (DeleteConflictOutcome) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(
myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
return (DeleteConflictOutcome)
compositeBroadcaster.callHooksAndReturnObject(Pointcut.STORAGE_PRESTORAGE_DELETE_CONFLICTS, hooks);
}
private void addConflictsToList(

View File

@ -155,17 +155,19 @@ public class ThreadSafeResourceDeleterSvc {
TransactionDetails theTransactionDetails,
IdDt nextSource,
IFhirResourceDao<?> dao) {
// Interceptor call: STORAGE_CASCADE_DELETE
// Remove the version so we grab the latest version to delete
IBaseResource resource = dao.read(nextSource.toVersionless(), theRequest);
// Interceptor call: STORAGE_CASCADE_DELETE
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(DeleteConflictList.class, theConflictList)
.add(IBaseResource.class, resource);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_CASCADE_DELETE, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_CASCADE_DELETE, params);
return dao.delete(resource.getIdElement(), theConflictList, theRequest, theTransactionDetails);
}

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.HistoryBuilder;
import ca.uhn.fhir.jpa.dao.HistoryBuilderFactory;
@ -189,15 +188,17 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
retVal.add(myJpaStorageResourceParser.toResource(resource, true));
}
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, myRequest);
// Interceptor call: STORAGE_PREACCESS_RESOURCES
{
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(retVal);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
for (int i = retVal.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {
@ -207,14 +208,13 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
}
// Interceptor broadcast: STORAGE_PRESHOW_RESOURCES
{
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRESHOW_RESOURCES)) {
SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(retVal);
HookParams params = new HookParams()
.add(IPreResourceShowDetails.class, showDetails)
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRESHOW_RESOURCES, params);
retVal = showDetails.toList();
}
@ -254,9 +254,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
String resourceName = mySearchEntity.getResourceType();
Class<? extends IBaseResource> resourceType =
myContext.getResourceDefinition(resourceName).getImplementingClass();
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(resourceName);
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType);
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(resourceName, resourceType);
RequestPartitionId requestPartitionId = getRequestPartitionId();
// we request 1 more resource than we need

View File

@ -374,8 +374,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
Class<? extends IBaseResource> resourceTypeClass =
myContext.getResourceDefinition(theResourceType).getImplementingClass();
final ISearchBuilder<JpaPid> sb =
mySearchBuilderFactory.newSearchBuilder(theCallingDao, theResourceType, resourceTypeClass);
final ISearchBuilder<JpaPid> sb = mySearchBuilderFactory.newSearchBuilder(theResourceType, resourceTypeClass);
sb.setFetchSize(mySyncSize);
final Integer loadSynchronousUpTo = getLoadSynchronousUpToOrNull(theCacheControlDirective);
@ -599,17 +598,18 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(() -> {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(
myInterceptorBroadcaster, theRequestDetails);
// Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
HookParams params = new HookParams()
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
boolean canUseCache = CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
theRequestDetails,
Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH,
params);
boolean canUseCache =
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
if (!canUseCache) {
return null;
}
@ -626,11 +626,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
theRequestDetails,
Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED,
params);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);
return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid());
});
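
A hedged sketch of the cache-reuse veto shown above: the STORAGE_PRECHECK_FOR_CACHED_SEARCH hooks are invoked through the composite broadcaster, and the boolean returned by callHooks() decides whether a previously registered search may be reused. The calls mirror the diff; the import paths and wrapper method are assumptions.

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;

public class CachedSearchPrecheckSketch {
    static boolean canReuseCachedSearch(
            IInterceptorBroadcaster theBroadcaster, RequestDetails theRequestDetails, SearchParameterMap theParams) {
        IInterceptorBroadcaster compositeBroadcaster =
                CompositeInterceptorBroadcaster.newCompositeBroadcaster(theBroadcaster, theRequestDetails);
        HookParams params = new HookParams()
                .add(SearchParameterMap.class, theParams)
                .add(RequestDetails.class, theRequestDetails)
                .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
        // Returns false if any registered hook vetoes reusing the cached search.
        return compositeBroadcaster.callHooks(Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
    }
}
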

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@ -181,12 +180,16 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
}
JpaPreResourceAccessDetails accessDetails = new JpaPreResourceAccessDetails(pids, () -> theSb);
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(
myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
HookParams params = new HookParams()
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
}
for (int i = pids.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {
@ -279,12 +282,9 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
RequestPartitionId theRequestPartitionId) {
final String searchUuid = UUID.randomUUID().toString();
IFhirResourceDao<?> callingDao = myDaoRegistry.getResourceDao(theResourceType);
Class<? extends IBaseResource> resourceTypeClass =
myContext.getResourceDefinition(theResourceType).getImplementingClass();
final ISearchBuilder sb =
mySearchBuilderFactory.newSearchBuilder(callingDao, theResourceType, resourceTypeClass);
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(theResourceType, resourceTypeClass);
sb.setFetchSize(mySyncSize);
return executeQuery(
theSearchParameterMap,

View File

@ -33,7 +33,6 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
@ -192,7 +191,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private final FhirContext myContext;
private final IIdHelperService<JpaPid> myIdHelperService;
private final JpaStorageSettings myStorageSettings;
private final IDao myCallingDao;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@ -220,7 +218,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public SearchBuilder(
IDao theDao,
String theResourceName,
JpaStorageSettings theStorageSettings,
HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
@ -235,7 +232,6 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
FhirContext theContext,
IIdHelperService theIdHelperService,
Class<? extends IBaseResource> theResourceType) {
myCallingDao = theDao;
myResourceName = theResourceName;
myResourceType = theResourceType;
myStorageSettings = theStorageSettings;
@ -426,15 +422,15 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
if (theSearchRuntimeDetails != null) {
theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster,
theRequest,
Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE,
params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
}
}
// can we skip the database entirely and return the pid list from here?
@ -1393,8 +1389,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
String searchIdOrDescription = theParameters.getSearchIdOrDescription();
List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes();
boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty();
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, theParameters.getRequestDetails())) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) {
CurrentThreadCaptureQueriesListener.startCapturing();
}
if (matches.isEmpty()) {
@ -1498,17 +1496,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
w.getMillisAndRestart(),
searchIdOrDescription);
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, request)) {
callRawSqlHookWithCurrentThreadQueries(request);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) {
callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster);
}
// Interceptor call: STORAGE_PREACCESS_RESOURCES
// This can be used to remove results from the search result details before
// the user has a chance to know that they were in the results
if (!allAdded.isEmpty()) {
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, request)) {
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
List<JpaPid> includedPidList = new ArrayList<>(allAdded);
JpaPreResourceAccessDetails accessDetails =
new JpaPreResourceAccessDetails(includedPidList, () -> this);
@ -1516,8 +1513,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, request)
.addIfMatchesType(ServletRequestDetails.class, request);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, request, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
for (int i = includedPidList.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {
@ -1813,17 +1809,18 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/**
* Calls Performance Trace Hook
* @param request the request details
* Sends a raw SQL query to the Pointcut for raw SQL queries.
*
* @param request the request details
* Sends a raw SQL query to the Pointcut for raw SQL queries.
*/
private void callRawSqlHookWithCurrentThreadQueries(RequestDetails request) {
private void callRawSqlHookWithCurrentThreadQueries(
RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
HookParams params = new HookParams()
.add(RequestDetails.class, request)
.addIfMatchesType(ServletRequestDetails.class, request)
.add(SqlQueryList.class, capturedQueries);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, request, Pointcut.JPA_PERFTRACE_RAW_SQL, params);
theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
}
@Nullable
@ -2095,16 +2092,19 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
indexStrings.sort(Comparator.naturalOrder());
// Interceptor broadcast: JPA_PERFTRACE_INFO
String indexStringForLog = indexStrings.size() > 1 ? indexStrings.toString() : indexStrings.get(0);
StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: "
+ indexStringForLog);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
String indexStringForLog = indexStrings.size() > 1 ? indexStrings.toString() : indexStrings.get(0);
StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: "
+ indexStringForLog);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
}
switch (requireNonNull(theComboParam.getComboSearchParamType())) {
case UNIQUE:
@ -2330,6 +2330,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private final boolean myHavePerfTraceFoundIdHook;
private final SortSpec mySort;
private final Integer myOffset;
private final IInterceptorBroadcaster myCompositeBroadcaster;
private boolean myFirst = true;
private IncludesIterator myIncludesIterator;
/**
@ -2368,16 +2369,16 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
mySort = myParams.getSort();
myOffset = myParams.getOffset();
myRequest = theRequest;
myCompositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
// everything requires fetching recursively all related resources
if (myParams.getEverythingMode() != null) {
myFetchIncludesForEverythingOperation = true;
}
myHavePerfTraceFoundIdHook = CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, myInterceptorBroadcaster, myRequest);
myHaveRawSqlHooks = CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, myRequest);
myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID);
myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL);
}
private void fetchNext() {
@ -2479,7 +2480,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
} finally {
// search finished - fire hooks
if (myHaveRawSqlHooks) {
callRawSqlHookWithCurrentThreadQueries(myRequest);
callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster);
}
}
@ -2488,8 +2489,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params);
myFirst = false;
}
@ -2498,8 +2498,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params);
}
}
@ -2523,8 +2522,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
HookParams params = new HookParams()
.add(Integer.class, System.identityHashCode(this))
.add(Object.class, theNextLong);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params);
}
private void sendProcessingMsgAndFirePerformanceHook() {
@ -2627,14 +2625,18 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
}
private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut pointcut) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage(theMessage);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, pointcut, params);
private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(thePointcut)) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage(theMessage);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, message);
compositeBroadcaster.callHooks(thePointcut, params);
}
}
public static int getMaximumPageSize() {

View File

@ -53,14 +53,17 @@ public class StorageInterceptorHooksFacade {
SearchParameterMap theParams,
Search search,
RequestPartitionId theRequestPartitionId) {
HookParams params = new HookParams()
.add(ICachedSearchDetails.class, search)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(SearchParameterMap.class, theParams)
.add(RequestPartitionId.class, theRequestPartitionId);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PRESEARCH_REGISTERED)) {
HookParams params = new HookParams()
.add(ICachedSearchDetails.class, search)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(SearchParameterMap.class, theParams)
.add(RequestPartitionId.class, theRequestPartitionId);
compositeBroadcaster.callHooks(Pointcut.STORAGE_PRESEARCH_REGISTERED, params);
}
}
// private IInterceptorBroadcaster myInterceptorBroadcaster;
}

View File

@ -407,12 +407,16 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
}
String message = builder.toString();
StorageProcessingMessage msg = new StorageProcessingMessage().setMessage(message);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_WARNING, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
}
}
/**

View File

@ -109,15 +109,18 @@ public class UriPredicateBuilder extends BaseSearchParamPredicateBuilder {
+ "] param[" + theParamName + "]";
ourLog.info(msg);
StorageProcessingMessage message = new StorageProcessingMessage();
ourLog.warn(msg);
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, message);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(
myInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
StorageProcessingMessage message = new StorageProcessingMessage();
message.setMessage(msg);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, message);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
}
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(
getPartitionSettings(), getRequestPartitionId(), getResourceType(), theParamName);

View File

@ -26,7 +26,6 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@ -95,7 +94,6 @@ public class SearchTask implements Callable<Void> {
protected final FhirContext myContext;
protected final ISearchResultCacheSvc mySearchResultCacheSvc;
private final SearchParameterMap myParams;
private final IDao myCallingDao;
private final String myResourceType;
private final ArrayList<JpaPid> mySyncedPids = new ArrayList<>();
private final CountDownLatch myInitialCollectionLatch = new CountDownLatch(1);
@ -113,6 +111,7 @@ public class SearchTask implements Callable<Void> {
private final JpaStorageSettings myStorageSettings;
private final ISearchCacheSvc mySearchCacheSvc;
private final IPagingProvider myPagingProvider;
private final IInterceptorBroadcaster myCompositeBroadcaster;
private Search mySearch;
private boolean myAbortRequested;
private int myCountSavedTotal = 0;
@ -149,7 +148,6 @@ public class SearchTask implements Callable<Void> {
// values
myOnRemove = theCreationParams.OnRemove;
mySearch = theCreationParams.Search;
myCallingDao = theCreationParams.CallingDao;
myParams = theCreationParams.Params;
myResourceType = theCreationParams.ResourceType;
myRequest = theCreationParams.Request;
@ -158,9 +156,11 @@ public class SearchTask implements Callable<Void> {
myLoadingThrottleForUnitTests = theCreationParams.getLoadingThrottleForUnitTests();
mySearchRuntimeDetails = new SearchRuntimeDetails(myRequest, mySearch.getUuid());
mySearchRuntimeDetails.setQueryString(myParams.toNormalizedQueryString(myCallingDao.getContext()));
mySearchRuntimeDetails.setQueryString(myParams.toNormalizedQueryString(myContext));
myRequestPartitionId = theCreationParams.RequestPartitionId;
myParentTransaction = ElasticApm.currentTransaction();
myCompositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, myRequest);
}
protected RequestPartitionId getRequestPartitionId() {
@ -203,7 +203,7 @@ public class SearchTask implements Callable<Void> {
private ISearchBuilder newSearchBuilder() {
Class<? extends IBaseResource> resourceTypeClass =
myContext.getResourceDefinition(myResourceType).getImplementingClass();
return mySearchBuilderFactory.newSearchBuilder(myCallingDao, myResourceType, resourceTypeClass);
return mySearchBuilderFactory.newSearchBuilder(myResourceType, resourceTypeClass);
}
@Nonnull
@ -280,7 +280,7 @@ public class SearchTask implements Callable<Void> {
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.withPropagation(Propagation.REQUIRES_NEW)
.execute(() -> doSaveSearch());
.execute(this::doSaveSearch);
}
@SuppressWarnings("rawtypes")
@ -307,8 +307,7 @@ public class SearchTask implements Callable<Void> {
.add(RequestDetails.class, mySearchRuntimeDetails.getRequestDetails())
.addIfMatchesType(
ServletRequestDetails.class, mySearchRuntimeDetails.getRequestDetails());
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
myCompositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);
for (int i = unsyncedPids.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {
@ -454,15 +453,13 @@ public class SearchTask implements Callable<Void> {
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_COMPLETE, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_COMPLETE, params);
} else {
HookParams params = new HookParams()
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_PASS_COMPLETE, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_PASS_COMPLETE, params);
}
ourLog.trace(
@ -516,8 +513,7 @@ public class SearchTask implements Callable<Void> {
.add(RequestDetails.class, myRequest)
.addIfMatchesType(ServletRequestDetails.class, myRequest)
.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params);
myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FAILED, params);
saveSearch();
span.captureException(t);

View File

@ -117,7 +117,6 @@ import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.common.EntityReference;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hibernate.search.mapper.pojo.massindexing.impl.PojoMassIndexingLoggingMonitor;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.convertors.advisors.impl.BaseAdvisor_40_50;
import org.hl7.fhir.convertors.context.ConversionContext40_50;
@ -284,9 +283,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
@Autowired
private HibernatePropertiesProvider myHibernatePropertiesProvider;
@Autowired
private CachingValidationSupport myCachingValidationSupport;
@Autowired
private VersionCanonicalizer myVersionCanonicalizer;
@ -2509,9 +2505,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
* results while they test changes, which is probably a worthwhile sacrifice
*/
private void afterValueSetExpansionStatusChange() {
// TODO: JA2 - Move this caching into the memorycacheservice, and only purge the
// relevant individual cache
myCachingValidationSupport.invalidateCaches();
provideValidationSupport().invalidateCaches();
}
private synchronized boolean isPreExpandingValueSets() {

View File

@ -20,6 +20,9 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.model.dstu2.resource.Subscription;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
@ -29,31 +32,32 @@ import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.ServerOperationInterceptorAdapter;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import static ca.uhn.fhir.subscription.SubscriptionConstants.ORDER_SUBSCRIPTION_ACTIVATING;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* Interceptor which requires newly created {@link Subscription subscriptions} to be in
* {@link SubscriptionStatusEnum#REQUESTED} state and prevents clients from changing the status.
*/
public class SubscriptionsRequireManualActivationInterceptorDstu2 extends ServerOperationInterceptorAdapter {
@Interceptor
public class SubscriptionsRequireManualActivationInterceptorDstu2 {
@Autowired
@Qualifier("mySubscriptionDaoDstu2")
private IFhirResourceDao<Subscription> myDao;
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
if (myDao.getContext().getResourceType(theResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.CREATE, null, theResource);
}
}
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
if (myDao.getContext().getResourceType(theNewResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.UPDATE, theOldResource, theNewResource);
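
The same migration is applied to the DSTU3 and R4 variants below: the ServerOperationInterceptorAdapter base class and its @Override callbacks are replaced by a plain class annotated with @Interceptor whose methods are bound to pointcuts, with an explicit order constant, via @Hook. A minimal sketch of that style, using an illustrative class name and a stubbed validation body (MyRequestedStatusGuard is not a HAPI class):

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

import static ca.uhn.fhir.subscription.SubscriptionConstants.ORDER_SUBSCRIPTION_ACTIVATING;

@Interceptor
public class MyRequestedStatusGuard {

	@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
	public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
		validate(theResource);
	}

	@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
	public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
		validate(theNewResource);
	}

	private void validate(IBaseResource theSubscription) {
		// The real interceptors call verifyStatusOk(...) here and throw
		// UnprocessableEntityException when a Subscription is not in REQUESTED status.
	}
}

Because there is no adapter base class any more, the interceptor is registered explicitly (for example via the interceptor service's registerInterceptor(...)), and the order constant controls where it runs relative to SubscriptionValidatingInterceptor, which gains ORDER_SUBSCRIPTION_VALIDATING in a later hunk.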

View File

@ -20,13 +20,15 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.ServerOperationInterceptorAdapter;
import org.hl7.fhir.dstu3.model.Subscription;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionChannelType;
import org.hl7.fhir.dstu3.model.Subscription.SubscriptionStatus;
@ -34,26 +36,28 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import static ca.uhn.fhir.subscription.SubscriptionConstants.ORDER_SUBSCRIPTION_ACTIVATING;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* Interceptor which requires newly created {@link Subscription subscriptions} to be in
* {@link SubscriptionStatus#REQUESTED} state and prevents clients from changing the status.
*/
public class SubscriptionsRequireManualActivationInterceptorDstu3 extends ServerOperationInterceptorAdapter {
@Interceptor
public class SubscriptionsRequireManualActivationInterceptorDstu3 {
@Autowired
@Qualifier("mySubscriptionDaoDstu3")
private IFhirResourceDao<Subscription> myDao;
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
if (myDao.getContext().getResourceType(theResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.CREATE, null, theResource);
}
}
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
if (myDao.getContext().getResourceType(theNewResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.UPDATE, theOldResource, theNewResource);

View File

@ -20,13 +20,15 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.model.dstu2.valueset.ResourceTypeEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.ServerOperationInterceptorAdapter;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.r4.model.Subscription.SubscriptionChannelType;
@ -34,26 +36,28 @@ import org.hl7.fhir.r4.model.Subscription.SubscriptionStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import static ca.uhn.fhir.subscription.SubscriptionConstants.ORDER_SUBSCRIPTION_ACTIVATING;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* Interceptor which requires newly created {@link Subscription subscriptions} to be in
* {@link SubscriptionStatus#REQUESTED} state and prevents clients from changing the status.
*/
public class SubscriptionsRequireManualActivationInterceptorR4 extends ServerOperationInterceptorAdapter {
@Interceptor
public class SubscriptionsRequireManualActivationInterceptorR4 {
@Autowired
@Qualifier("mySubscriptionDaoR4")
private IFhirResourceDao<Subscription> myDao;
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
if (myDao.getContext().getResourceType(theResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.CREATE, null, theResource);
}
}
@Override
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, order = ORDER_SUBSCRIPTION_ACTIVATING)
public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
if (myDao.getContext().getResourceType(theNewResource).equals(ResourceTypeEnum.SUBSCRIPTION.getCode())) {
verifyStatusOk(RestOperationTypeEnum.UPDATE, theOldResource, theNewResource);

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.validation;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.packages.NpmJpaValidationSupport;
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
@ -39,7 +40,7 @@ public class JpaValidationSupportChain extends ValidationSupportChain {
private final FhirContext myFhirContext;
@Autowired
@Qualifier("myJpaValidationSupport")
@Qualifier(JpaConfig.JPA_VALIDATION_SUPPORT)
public IValidationSupport myJpaValidationSupport;
@Qualifier("myDefaultProfileValidationSupport")
@ -64,7 +65,13 @@ public class JpaValidationSupportChain extends ValidationSupportChain {
/**
* Constructor
*/
public JpaValidationSupportChain(FhirContext theFhirContext) {
public JpaValidationSupportChain(
FhirContext theFhirContext, ValidationSupportChain.CacheConfiguration theCacheConfiguration) {
super(theCacheConfiguration);
assert theFhirContext != null;
assert theCacheConfiguration != null;
myFhirContext = theFhirContext;
}
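
With CachingValidationSupport removed elsewhere in this commit, caching is configured on the chain itself through the new constructor argument. A minimal sketch of constructing the revised chain, assuming CacheConfiguration.defaultValues() as the factory for default cache settings and an illustrative Spring configuration class rather than the actual JpaConfig wiring:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.validation.JpaValidationSupportChain;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ValidationChainConfigSketch {

	@Bean
	public JpaValidationSupportChain jpaValidationSupportChain(FhirContext theFhirContext) {
		// The chain now owns its cache, so no external CachingValidationSupport wrapper is needed
		return new JpaValidationSupportChain(
				theFhirContext, ValidationSupportChain.CacheConfiguration.defaultValues());
	}
}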

View File

@ -214,9 +214,7 @@ public class JpaBulkExportProcessorTest {
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(any(RuntimeResourceDefinition.class), eq(parameters), any(boolean.class)))
.thenReturn(maps);
// from getSearchBuilderForLocalResourceType
when(myDaoRegistry.getResourceDao(anyString()))
.thenReturn(mockDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(mockDao), eq(parameters.getResourceType()), any()))
when(mySearchBuilderFactory.newSearchBuilder(eq(parameters.getResourceType()), any()))
.thenReturn(searchBuilder);
// ret
when(searchBuilder.createQuery(
@ -304,9 +302,7 @@ public class JpaBulkExportProcessorTest {
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(any(RuntimeResourceDefinition.class), eq(parameters), any(boolean.class)))
.thenReturn(Collections.singletonList(new SearchParameterMap()));
// from getSearchBuilderForLocalResourceType
when(myDaoRegistry.getResourceDao(not(eq("Group"))))
.thenReturn(mockDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(mockDao), eq(parameters.getResourceType()), any()))
when(mySearchBuilderFactory.newSearchBuilder(eq(parameters.getResourceType()), any()))
.thenReturn(searchBuilder);
// ret
when(searchBuilder.createQuery(
@ -432,9 +428,7 @@ public class JpaBulkExportProcessorTest {
when(myIdHelperService.getPidOrNull(eq(getPartitionIdFromParams(thePartitioned)), eq(groupResource)))
.thenReturn(groupId);
// getMembersFromGroupWithFilter
when(myDaoRegistry.getResourceDao(eq("Patient")))
.thenReturn(patientDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(patientDao), eq("Patient"), eq(Patient.class)))
when(mySearchBuilderFactory.newSearchBuilder(eq("Patient"), eq(Patient.class)))
.thenReturn(patientSearchBuilder);
RuntimeResourceDefinition patientDef = myFhirContext.getResourceDefinition("Patient");
SearchParameterMap patientSpMap = new SearchParameterMap();
@ -447,9 +441,7 @@ public class JpaBulkExportProcessorTest {
RuntimeResourceDefinition observationDef = myFhirContext.getResourceDefinition("Observation");
when(myBulkExportHelperService.createSearchParameterMapsForResourceType(eq(observationDef), eq(parameters), any(boolean.class)))
.thenReturn(Collections.singletonList(observationSpMap));
when(myDaoRegistry.getResourceDao((eq("Observation"))))
.thenReturn(observationDao);
when(mySearchBuilderFactory.newSearchBuilder(eq(observationDao), eq("Observation"), eq(Observation.class)))
when(mySearchBuilderFactory.newSearchBuilder(eq("Observation"), eq(Observation.class)))
.thenReturn(observationSearchBuilder);
when(observationSearchBuilder.loadIncludes(
any(SearchBuilderLoadIncludesParameters.class)
@ -520,10 +512,7 @@ public class JpaBulkExportProcessorTest {
any(ExportPIDIteratorParameters.class),
any(boolean.class)
)).thenReturn(Collections.singletonList(new SearchParameterMap()));
when(myDaoRegistry.getResourceDao(eq("Patient")))
.thenReturn(dao);
when(mySearchBuilderFactory.newSearchBuilder(
any(IFhirResourceDao.class),
anyString(),
any()
)).thenReturn(searchBuilder);

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -294,6 +294,20 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
}
}
@Test
public void testNoOpUpdateDoesNotModifyLastUpdated() throws InterruptedException {
myStorageSettings.setAdvancedHSearchIndexing(true);
Patient patient = new Patient();
patient.getNameFirstRep().setFamily("graham").addGiven("gary");
patient = (Patient) myPatientDao.create(patient).getResource();
Date originalLastUpdated = patient.getMeta().getLastUpdated();
patient = (Patient) myPatientDao.update(patient).getResource();
Date newLastUpdated = patient.getMeta().getLastUpdated();
assertThat(originalLastUpdated).isEqualTo(newLastUpdated);
}
@Test
public void testFullTextSearchesArePerformanceLogged() {
@ -1804,68 +1818,52 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
@Test
public void eq() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=eq" + myOldLastUpdatedDateTime);
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myOldObsId);
}
@Test
public void eqLessPrecisionRequest() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=eq2017-03-24");
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myOldObsId);
}
@Test
public void ne() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=ne" + myOldLastUpdatedDateTime);
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myNewObsId);
}
@Test
void gt() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=gt2018-01-01");
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myNewObsId);
}
@Test
public void ge() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=ge" + myOldLastUpdatedDateTime);
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myOldObsId, myNewObsId);
}
@Test
void lt() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=lt2018-01-01");
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myOldObsId);
}
@Test
public void le() {
myCaptureQueriesListener.clear();
List<String> allIds = myTestDaoSearch.searchForIds("/Observation?_lastUpdated=le" + myOldLastUpdatedDateTime);
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().size()).as("we build the bundle with no sql").isEqualTo(0);
assertThat(allIds).containsExactly(myOldObsId);
}
}
@Nested

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.model.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.model.dstu2.composite.CodingDt;
import com.google.common.collect.HashMultimap;
@ -57,12 +58,17 @@ public class ExtendedHSearchIndexData {
private String myForcedId;
private String myResourceJSON;
private IBaseResource myResource;
private ResourceTable myEntity;
public ExtendedHSearchIndexData(
FhirContext theFhirContext, StorageSettings theStorageSettings, IBaseResource theResource) {
FhirContext theFhirContext,
StorageSettings theStorageSettings,
IBaseResource theResource,
ResourceTable theEntity) {
this.myFhirContext = theFhirContext;
this.myStorageSettings = theStorageSettings;
myResource = theResource;
myEntity = theEntity;
}
private <V> BiConsumer<String, V> ifNotContained(BiConsumer<String, V> theIndexWriter) {

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -941,8 +941,9 @@ public class SearchParamExtractorService {
if (myPartitionSettings.getAllowReferencesAcrossPartitions() == ALLOWED_UNQUALIFIED) {
// Interceptor: Pointcut.JPA_CROSS_PARTITION_REFERENCE_DETECTED
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)) {
CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails(
theRequestPartitionId,
theSourceResourceName,
@ -950,12 +951,8 @@ public class SearchParamExtractorService {
theRequest,
theTransactionDetails);
HookParams params = new HookParams(referenceDetails);
targetResource =
(IResourceLookup<JpaPid>) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(
myInterceptorBroadcaster,
theRequest,
Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE,
params);
targetResource = (IResourceLookup<JpaPid>) compositeBroadcaster.callHooksAndReturnObject(
Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params);
} else {
targetResource = myResourceLinkResolver.findTargetResource(
RequestPartitionId.allPartitions(),
@ -1089,8 +1086,9 @@ public class SearchParamExtractorService {
}
// If extraction generated any warnings, broadcast an error
if (CompositeInterceptorBroadcaster.hasHooks(
Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) {
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(theInterceptorBroadcaster, theRequestDetails);
if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
for (String next : theSearchParamSet.getWarnings()) {
StorageProcessingMessage messageHolder = new StorageProcessingMessage();
messageHolder.setMessage(next);
@ -1098,8 +1096,7 @@ public class SearchParamExtractorService {
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, messageHolder);
CompositeInterceptorBroadcaster.doCallHooks(
theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
}
}
}
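
Besides the boolean callHooks(...) form, the rewritten block above uses callHooksAndReturnObject(...) so a registered hook can supply the resolved target itself. A minimal sketch of that variant, with an illustrative resolver class and an untyped details parameter standing in for CrossPartitionReferenceDetails:

import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;

public class CrossPartitionResolverSketch {

	private final IInterceptorBroadcaster myCompositeBroadcaster;

	public CrossPartitionResolverSketch(IInterceptorBroadcaster theCompositeBroadcaster) {
		myCompositeBroadcaster = theCompositeBroadcaster;
	}

	public Object resolveTarget(Object theCrossPartitionReferenceDetails) {
		if (myCompositeBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)) {
			// In the real service a CrossPartitionReferenceDetails instance is passed here;
			// a hook for this pointcut may return the resolved target resource lookup
			HookParams params = new HookParams(theCrossPartitionReferenceDetails);
			return myCompositeBroadcaster.callHooksAndReturnObject(
					Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params);
		}
		return null; // the real service falls back to myResourceLinkResolver.findTargetResource(...)
	}
}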

View File

@ -1,14 +1,20 @@
package ca.uhn.fhir.jpa.searchparam.extractor;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.test.utilities.MockInvoker;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
@ -36,14 +42,17 @@ public class SearchParamExtractorServiceTest {
searchParamSet.addWarning("help i'm a bug");
searchParamSet.addWarning("Spiff");
when(myJpaInterceptorBroadcaster.hasHooks(any())).thenReturn(true);
when(myJpaInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true);
AtomicInteger counter = new AtomicInteger();
when(myJpaInterceptorBroadcaster.hasHooks(eq(Pointcut.JPA_PERFTRACE_WARNING))).thenReturn(true);
when(myJpaInterceptorBroadcaster.getInvokersForPointcut(eq(Pointcut.JPA_PERFTRACE_WARNING))).thenReturn(MockInvoker.list((Consumer<HookParams>) params->counter.incrementAndGet()));
ServletRequestDetails requestDetails = new ServletRequestDetails(myRequestInterceptorBroadcaster);
SearchParamExtractorService.handleWarnings(requestDetails, myJpaInterceptorBroadcaster, searchParamSet);
verify(myJpaInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
verify(myRequestInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
verify(myJpaInterceptorBroadcaster, times(3)).hasHooks(eq(Pointcut.JPA_PERFTRACE_WARNING));
verify(myRequestInterceptorBroadcaster, times(2)).hasHooks(eq(Pointcut.JPA_PERFTRACE_WARNING));
assertEquals(2, counter.get());
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -118,12 +118,15 @@ public class SubscriptionMatcherInterceptor {
ResourceModifiedMessage msg = createResourceModifiedMessage(theNewResource, theOperationType, theRequest);
// Interceptor call: SUBSCRIPTION_RESOURCE_MODIFIED
HookParams params = new HookParams().add(ResourceModifiedMessage.class, msg);
boolean outcome = CompositeInterceptorBroadcaster.doCallHooks(
myInterceptorBroadcaster, theRequest, Pointcut.SUBSCRIPTION_RESOURCE_MODIFIED, params);
IInterceptorBroadcaster compositeBroadcaster =
CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
if (compositeBroadcaster.hasHooks(Pointcut.SUBSCRIPTION_RESOURCE_MODIFIED)) {
HookParams params = new HookParams().add(ResourceModifiedMessage.class, msg);
boolean outcome = compositeBroadcaster.callHooks(Pointcut.SUBSCRIPTION_RESOURCE_MODIFIED, params);
if (!outcome) {
return;
if (!outcome) {
return;
}
}
processResourceModifiedMessage(msg);

View File

@ -64,6 +64,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static ca.uhn.fhir.subscription.SubscriptionConstants.ORDER_SUBSCRIPTION_VALIDATING;
import static org.apache.commons.lang3.StringUtils.isBlank;
@Interceptor
@ -92,14 +93,14 @@ public class SubscriptionValidatingInterceptor {
@Autowired
private SubscriptionChannelTypeValidatorFactory mySubscriptionChannelTypeValidatorFactory;
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, order = ORDER_SUBSCRIPTION_VALIDATING)
public void resourcePreCreate(
IBaseResource theResource, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
validateSubmittedSubscription(
theResource, theRequestDetails, theRequestPartitionId, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED);
}
@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
@Hook(value = Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, order = ORDER_SUBSCRIPTION_VALIDATING)
public void resourceUpdated(
IBaseResource theOldResource,
IBaseResource theResource,

View File

@ -383,8 +383,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
String resourceType = myFhirContext.getResourceType(theJobDetails.getCurrentSearchResourceType());
RuntimeResourceDefinition resourceDef =
myFhirContext.getResourceDefinition(theJobDetails.getCurrentSearchResourceType());
ISearchBuilder searchBuilder = mySearchBuilderFactory.newSearchBuilder(
resourceDao, resourceType, resourceDef.getImplementingClass());
ISearchBuilder searchBuilder =
mySearchBuilderFactory.newSearchBuilder(resourceType, resourceDef.getImplementingClass());
List<IBaseResource> listToPopulate = new ArrayList<>();
myTransactionService.withRequest(null).execute(() -> {

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -113,7 +113,7 @@ public class SubscriptionsDstu2Test extends BaseResourceProviderDstu2Test {
myClient.create().resource(subs).execute();
fail("");
} catch (UnprocessableEntityException e) {
assertThat(e.getMessage()).contains("Unknown SubscriptionStatus code 'aaaaa'");
assertThat(e.getMessage()).containsAnyOf("invalid value aaaaa", "Unknown SubscriptionStatus code 'aaaaa'");
}
}

View File

@ -52,7 +52,7 @@ public class BaseSearchSvc {
protected static final FhirContext ourCtx = FhirContext.forDstu3Cached();
public void after() {
verify(mySearchBuilderFactory, atMost(myExpectedNumberOfSearchBuildersCreated)).newSearchBuilder(any(), any(), any());
verify(mySearchBuilderFactory, atMost(myExpectedNumberOfSearchBuildersCreated)).newSearchBuilder(any(), any());
}
protected List<JpaPid> createPidSequence(int to) {

View File

@ -76,6 +76,7 @@ import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
@SuppressWarnings({"unchecked"})
@ -91,7 +92,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Mock
private ISearchResultCacheSvc mySearchResultCacheSvc;
private Search myCurrentSearch;
@Mock
@Mock(strictness = Mock.Strictness.STRICT_STUBS)
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Mock
private SearchBuilderFactory<JpaPid> mySearchBuilderFactory;
@ -289,7 +290,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
}
private void initSearches() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
}
private void initAsyncSearches() {
@ -318,8 +319,8 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
SlowIterator iter = new SlowIterator(pids.iterator(), 500);
when(mySearchBuilder.createQuery(same(params), any(), any(), nullable(RequestPartitionId.class))).thenReturn(iter);
mockSearchTask();
when(myInterceptorBroadcaster.callHooks(any(), any()))
.thenReturn(true);
when(myInterceptorBroadcaster.hasHooks(any())).thenReturn(true);
when(myInterceptorBroadcaster.getInvokersForPointcut(any())).thenReturn(List.of());
ourLog.info("Registering the first search");
new Thread(() -> mySvc.registerSearch(myCallingDao, params, "Patient", new CacheControlDirective(), null, RequestPartitionId.allPartitions())).start();
@ -437,7 +438,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Test
public void testLoadSearchResultsFromDifferentCoordinator() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
final String uuid = UUID.randomUUID().toString();
@ -517,7 +518,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearch() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
@ -531,7 +532,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearchWithOffset() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
SearchParameterMap params = new SearchParameterMap();
params.setOffset(10);
@ -544,7 +545,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearchUpTo() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
int loadUpto = 30;
SearchParameterMap params = new SearchParameterMap();
@ -584,7 +585,6 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
@Test
public void testFetchAllResultsReturnsNull() {
when(myDaoRegistry.getResourceDao(anyString())).thenReturn(myCallingDao);
when(myCallingDao.getContext()).thenReturn(ourCtx);
Search search = new Search();
search.setUuid("0000-1111");

View File

@ -41,7 +41,7 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearch() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any()))
when(mySearchBuilderFactory.newSearchBuilder(any(), any()))
.thenReturn(mySearchBuilder);
SearchParameterMap params = new SearchParameterMap();
@ -65,7 +65,7 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearchWithOffset() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
SearchParameterMap params = new SearchParameterMap();
params.setCount(10);
@ -87,7 +87,7 @@ public class SynchronousSearchSvcImplTest extends BaseSearchSvc {
@Test
public void testSynchronousSearchUpTo() {
when(mySearchBuilderFactory.newSearchBuilder(any(), any(), any())).thenReturn(mySearchBuilder);
when(mySearchBuilderFactory.newSearchBuilder(any(), any())).thenReturn(mySearchBuilder);
when(myStorageSettings.getDefaultTotalMode()).thenReturn(null);
SearchParameterMap params = new SearchParameterMap();

View File

@ -15,6 +15,7 @@ import java.util.List;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
public class TerminologySvcImplDstu2Test extends BaseJpaDstu2Test {
@ -29,6 +30,8 @@ public class TerminologySvcImplDstu2Test extends BaseJpaDstu2Test {
List<FhirVersionIndependentConcept> concepts;
Set<String> codes;
when(mySrd.getInterceptorBroadcaster()).thenReturn(null);
ValueSet upload = new ValueSet();
upload.setId(new IdDt("testVs"));
upload.setUrl("http://myVs");
@ -61,6 +64,8 @@ public class TerminologySvcImplDstu2Test extends BaseJpaDstu2Test {
List<FhirVersionIndependentConcept> concepts;
Set<String> codes;
when(mySrd.getInterceptorBroadcaster()).thenReturn(null);
ValueSet upload = new ValueSet();
upload.setId(new IdDt("testVs"));
upload.setUrl("http://myVs");

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -58,8 +58,6 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3TerminologyTest.class);
@Autowired
private CachingValidationSupport myCachingValidationSupport;
@Autowired
private ITermDeferredStorageSvc myTermDeferredStorageSvc;
@AfterEach
@ -69,10 +67,12 @@ public class FhirResourceDaoDstu3TerminologyTest extends BaseJpaDstu3Test {
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
@Override
@BeforeEach
public void before() {
public void before() throws Exception {
super.before();
myStorageSettings.setMaximumExpansionSize(5000);
myCachingValidationSupport.invalidateCaches();
myValidationSupport.invalidateCaches();
}
private CodeSystem createExternalCs() {

View File

@ -14,7 +14,6 @@ import ca.uhn.fhir.test.utilities.ProxyUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.io.IOUtils;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
@ -55,8 +54,6 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {
@Autowired
private IValidatorModule myValidatorModule;
@Autowired
private CachingValidationSupport myValidationSupport;
@Autowired
private FhirInstanceValidator myFhirInstanceValidator;
@Autowired
@Qualifier(JpaConfig.JPA_VALIDATION_SUPPORT)

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.7-SNAPSHOT</version>
<version>7.7.8-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

Some files were not shown because too many files have changed in this diff.