mirror of https://github.com/hapifhir/hapi-fhir.git (synced 2025-02-18 10:55:22 +00:00)

commit 626bb8ed52: merge master v2
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -27,51 +27,6 @@ import java.util.function.Predicate;

public interface IBaseInterceptorService<POINTCUT extends IPointcut> extends IBaseInterceptorBroadcaster<POINTCUT> {

/**
* Register an interceptor that will be used in a {@link ThreadLocal} context.
* This means that events will only be broadcast to the given interceptor if
* they were fired from the current thread.
* <p>
* Note that it is almost always desirable to call this method with a
* try-finally statement that removes the interceptor afterwards, since
* this can lead to memory leakage, poor performance due to ever-increasing
* numbers of interceptors, etc.
* </p>
* <p>
* Note that most methods such as {@link #getAllRegisteredInterceptors()} and
* {@link #unregisterAllInterceptors()} do not affect thread local interceptors
* as they are kept in a separate list.
* </p>
* <p>
* ThreadLocal interceptors are now disabled by default as of HAPI FHIR 6.2.0 and must be manually
* enabled by calling {@link ca.uhn.fhir.interceptor.executor.BaseInterceptorService#setThreadlocalInvokersEnabled(boolean)}.
* They are now deprecated. Registering a threadlocal interceptor without enabling this feature will
* result in a {@link IllegalArgumentException}.
* </p>
*
* @param theInterceptor The interceptor
* @return Returns <code>true</code> if at least one valid hook method was found on this interceptor
* @deprecated Threadlocal interceptors have been deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release due to lack of use. If you feel that this is a bad decision, please speak up on the HAPI FHIR mailing list.
*/
@Deprecated
boolean registerThreadLocalInterceptor(Object theInterceptor);

/**
* Unregisters a ThreadLocal interceptor
* <p>
* ThreadLocal interceptors are now disabled by default as of HAPI FHIR 6.2.0 and must be manually
* enabled by calling {@link ca.uhn.fhir.interceptor.executor.BaseInterceptorService#setThreadlocalInvokersEnabled(boolean)}.
* They are now deprecated. Registering a threadlocal interceptor without enabling this feature will
* result in a {@link IllegalArgumentException}.
* </p>
*
* @param theInterceptor The interceptor
* @see #registerThreadLocalInterceptor(Object)
* @deprecated Threadlocal interceptors have been deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release due to lack of use. If you feel that this is a bad decision, please speak up on the HAPI FHIR mailing list.
*/
@Deprecated
void unregisterThreadLocalInterceptor(Object theInterceptor);

/**
* Register an interceptor. This method has no effect if the given interceptor is already registered.
*

@@ -94,8 +49,7 @@ public interface IBaseInterceptorService<POINTCUT extends IPointcut> extends IBa
List<Object> getAllRegisteredInterceptors();

/**
* Unregisters all registered interceptors. Note that this method does not unregister
* any {@link #registerThreadLocalInterceptor(Object) thread local interceptors}.
* Unregisters all registered interceptors.
*/
void unregisterAllInterceptors();
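A minimal usage sketch of the pattern the Javadoc above describes (illustrative only, not part of this commit; the interceptor class is hypothetical): thread-local invokers must be enabled explicitly before registering, and registration should be paired with a try-finally so the interceptor is always removed again.

InterceptorService interceptorService = new InterceptorService();
interceptorService.setThreadlocalInvokersEnabled(true); // deprecated; disabled by default since 6.2.0
Object myThreadLocalInterceptor = new MyHookInterceptor(); // hypothetical @Interceptor-annotated class
interceptorService.registerThreadLocalInterceptor(myThreadLocalInterceptor);
try {
	// hooks fired from this thread are broadcast to the thread-local interceptor
	interceptorService.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
} finally {
	interceptorService.unregisterThreadLocalInterceptor(myThreadLocalInterceptor);
}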
@@ -32,7 +32,6 @@ import ca.uhn.fhir.util.ReflectionUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

@@ -51,32 +50,33 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
import java.util.stream.Collectors;

// TODO: JA maybe add an enummap for pointcuts registered?
public abstract class BaseInterceptorService<POINTCUT extends IPointcut> implements IBaseInterceptorService<POINTCUT>, IBaseInterceptorBroadcaster<POINTCUT> {
public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & IPointcut> implements IBaseInterceptorService<POINTCUT>, IBaseInterceptorBroadcaster<POINTCUT> {
private static final Logger ourLog = LoggerFactory.getLogger(BaseInterceptorService.class);
private final List<Object> myInterceptors = new ArrayList<>();
private final ListMultimap<POINTCUT, BaseInvoker> myGlobalInvokers = ArrayListMultimap.create();
private final ListMultimap<POINTCUT, BaseInvoker> myAnonymousInvokers = ArrayListMultimap.create();
private final Object myRegistryMutex = new Object();
private final ThreadLocal<ListMultimap<POINTCUT, BaseInvoker>> myThreadlocalInvokers = new ThreadLocal<>();
private final Class<POINTCUT> myPointcutType;
private volatile EnumSet<POINTCUT> myRegisteredPointcuts;
private String myName;
private boolean myThreadlocalInvokersEnabled = false;
private boolean myWarnOnInterceptorWithNoHooks = true;

/**
* Constructor which uses a default name of "default"
*/
public BaseInterceptorService() {
this("default");
public BaseInterceptorService(Class<POINTCUT> thePointcutType) {
this(thePointcutType, "default");
}

/**
@@ -84,9 +84,11 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
*
* @param theName The name for this registry (useful for troubleshooting)
*/
public BaseInterceptorService(String theName) {
public BaseInterceptorService(Class<POINTCUT> thePointcutType, String theName) {
super();
myName = theName;
myPointcutType = thePointcutType;
rebuildRegisteredPointcutSet();
}

/**
@@ -96,23 +98,6 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
myWarnOnInterceptorWithNoHooks = theWarnOnInterceptorWithNoHooks;
}

/**
* Are threadlocal interceptors enabled on this registry (defaults to false)
*/
public boolean isThreadlocalInvokersEnabled() {
return myThreadlocalInvokersEnabled;
}

/**
* Are threadlocal interceptors enabled on this registry (defaults to false)
*
* @deprecated ThreadLocal interceptors are deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release.
*/
@Deprecated
public void setThreadlocalInvokersEnabled(boolean theThreadlocalInvokersEnabled) {
myThreadlocalInvokersEnabled = theThreadlocalInvokersEnabled;
}

@VisibleForTesting
List<Object> getGlobalInterceptorsForUnitTest() {
return myInterceptors;

@@ -131,14 +116,15 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
if (!isInterceptorAlreadyRegistered(theInterceptor)) {
myInterceptors.add(theInterceptor);
}

rebuildRegisteredPointcutSet();
}
}

@Override
public List<Object> getAllRegisteredInterceptors() {
synchronized (myRegistryMutex) {
List<Object> retVal = new ArrayList<>();
retVal.addAll(myInterceptors);
List<Object> retVal = new ArrayList<>(myInterceptors);
return Collections.unmodifiableList(retVal);
}
}

@@ -156,14 +142,17 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
@Override
public void unregisterInterceptors(@Nullable Collection<?> theInterceptors) {
if (theInterceptors != null) {
new ArrayList<>(theInterceptors).forEach(t -> unregisterInterceptor(t));
// We construct a new list before iterating because the service's internal
// interceptor lists get passed into this method, and we get concurrent
// modification errors if we modify them at the same time as we iterate them
new ArrayList<>(theInterceptors).forEach(this::unregisterInterceptor);
}
}

@Override
public void registerInterceptors(@Nullable Collection<?> theInterceptors) {
if (theInterceptors != null) {
theInterceptors.forEach(t -> registerInterceptor(t));
theInterceptors.forEach(this::registerInterceptor);
}
}

@@ -187,37 +176,11 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
unregisterInterceptor(nextInvoker.getValue().getInterceptor());
}
}

rebuildRegisteredPointcutSet();
}
}

@Override
public boolean registerThreadLocalInterceptor(Object theInterceptor) {
Validate.isTrue (myThreadlocalInvokersEnabled, "Thread local interceptors are not enabled on this server");
ListMultimap<POINTCUT, BaseInvoker> invokers = getThreadLocalInvokerMultimap();
scanInterceptorAndAddToInvokerMultimap(theInterceptor, invokers);
return !invokers.isEmpty();

}

@Override
public void unregisterThreadLocalInterceptor(Object theInterceptor) {
Validate.isTrue (myThreadlocalInvokersEnabled, "Thread local interceptors are not enabled on this server");
ListMultimap<POINTCUT, BaseInvoker> invokers = getThreadLocalInvokerMultimap();
invokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
if (invokers.isEmpty()) {
myThreadlocalInvokers.remove();
}
}

private ListMultimap<POINTCUT, BaseInvoker> getThreadLocalInvokerMultimap() {
ListMultimap<POINTCUT, BaseInvoker> invokers = myThreadlocalInvokers.get();
if (invokers == null) {
invokers = Multimaps.synchronizedListMultimap(ArrayListMultimap.create());
myThreadlocalInvokers.set(invokers);
}
return invokers;
}

@Override
public boolean registerInterceptor(Object theInterceptor) {
synchronized (myRegistryMutex) {

@@ -238,10 +201,19 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
myInterceptors.add(theInterceptor);
sortByOrderAnnotation(myInterceptors);

rebuildRegisteredPointcutSet();

return true;
}
}

private void rebuildRegisteredPointcutSet() {
EnumSet<POINTCUT> registeredPointcuts = EnumSet.noneOf(myPointcutType);
registeredPointcuts.addAll(myAnonymousInvokers.keySet());
registeredPointcuts.addAll(myGlobalInvokers.keySet());
myRegisteredPointcuts = registeredPointcuts;
}

private boolean isInterceptorAlreadyRegistered(Object theInterceptor) {
for (Object next : myInterceptors) {
if (next == theInterceptor) {

@@ -257,6 +229,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
boolean removed = myInterceptors.removeIf(t -> t == theInterceptor);
removed |= myGlobalInvokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
removed |= myAnonymousInvokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
rebuildRegisteredPointcutSet();
return removed;
}
}

@@ -286,14 +259,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

@Override
public boolean hasHooks(POINTCUT thePointcut) {
return myGlobalInvokers.containsKey(thePointcut)
|| myAnonymousInvokers.containsKey(thePointcut)
|| hasThreadLocalHooks(thePointcut);
}

private boolean hasThreadLocalHooks(POINTCUT thePointcut) {
ListMultimap<POINTCUT, BaseInvoker> hooks = myThreadlocalInvokersEnabled ? myThreadlocalInvokers.get() : null;
return hooks != null && hooks.containsKey(thePointcut);
return myRegisteredPointcuts.contains(thePointcut);
}

@Override
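With this change, hasHooks(..) becomes a constant-time membership check against the volatile EnumSet that rebuildRegisteredPointcutSet() refreshes whenever interceptors are registered or unregistered. A brief illustrative sketch of using it as a cheap guard before building HookParams (assumed usage, not taken from this commit):

InterceptorService interceptorService = new InterceptorService();
if (interceptorService.hasHooks(Pointcut.TEST_RB)) {
	// only pay the cost of building params and invoking when a hook is registered
	HookParams params = new HookParams().add("A").add("B");
	interceptorService.callHooks(Pointcut.TEST_RB, params);
}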
@@ -352,12 +318,6 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
List<BaseInvoker> globalInvokers = myGlobalInvokers.get(thePointcut);
List<BaseInvoker> anonymousInvokers = myAnonymousInvokers.get(thePointcut);
List<BaseInvoker> threadLocalInvokers = null;
if (myThreadlocalInvokersEnabled) {
ListMultimap<POINTCUT, BaseInvoker> pointcutToInvokers = myThreadlocalInvokers.get();
if (pointcutToInvokers != null) {
threadLocalInvokers = pointcutToInvokers.get(thePointcut);
}
}
invokers = union(globalInvokers, anonymousInvokers, threadLocalInvokers);
}

@@ -368,7 +328,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
* First argument must be the global invoker list!!
*/
@SafeVarargs
private final List<BaseInvoker> union(List<BaseInvoker>... theInvokersLists) {
private List<BaseInvoker> union(List<BaseInvoker>... theInvokersLists) {
List<BaseInvoker> haveOne = null;
boolean haveMultiple = false;
for (List<BaseInvoker> nextInvokerList : theInvokersLists) {

@@ -404,8 +364,8 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

retVal = Arrays
.stream(theInvokersLists)
.filter(t -> t != null)
.flatMap(t -> t.stream())
.filter(Objects::nonNull)
.flatMap(Collection::stream)
.sorted()
.collect(Collectors.toList());

@@ -417,7 +377,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
/**
* Only call this when assertions are enabled, it's expensive
*/
boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) {
final boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) {
if (theParams.getParamsForType().values().size() != thePointcut.getParameterTypes().size()) {
throw new IllegalArgumentException(Msg.code(1909) + String.format("Wrong number of params for pointcut %s - Wanted %s but found %s", thePointcut.name(), toErrorString(thePointcut.getParameterTypes()), theParams.getParamsForType().values().stream().map(t -> t != null ? t.getClass().getSimpleName() : "null").sorted().collect(Collectors.toList())));
}

@@ -449,16 +409,16 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

// Register the interceptor and its various hooks
for (HookInvoker nextAddedHook : addedInvokers) {
IPointcut nextPointcut = nextAddedHook.getPointcut();
POINTCUT nextPointcut = nextAddedHook.getPointcut();
if (nextPointcut.equals(Pointcut.INTERCEPTOR_REGISTERED)) {
continue;
}
theInvokers.put((POINTCUT) nextPointcut, nextAddedHook);
theInvokers.put(nextPointcut, nextAddedHook);
}

// Make sure we're always sorted according to the order declared in @Order
for (IPointcut nextPointcut : theInvokers.keys()) {
List<BaseInvoker> nextInvokerList = theInvokers.get((POINTCUT) nextPointcut);
for (POINTCUT nextPointcut : theInvokers.keys()) {
List<BaseInvoker> nextInvokerList = theInvokers.get(nextPointcut);
nextInvokerList.sort(Comparator.naturalOrder());
}

@@ -489,60 +449,12 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

protected abstract Optional<HookDescriptor> scanForHook(Method nextMethod);

protected static <T extends Annotation> Optional<T> findAnnotation(AnnotatedElement theObject, Class<T> theHookClass) {
T annotation;
if (theObject instanceof Method) {
annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
} else {
annotation = theObject.getAnnotation(theHookClass);
}
return Optional.ofNullable(annotation);
}

private static int determineOrder(Class<?> theInterceptorClass) {
int typeOrder = Interceptor.DEFAULT_ORDER;
Optional<Interceptor> typeOrderAnnotation = findAnnotation(theInterceptorClass, Interceptor.class);
if (typeOrderAnnotation.isPresent()) {
typeOrder = typeOrderAnnotation.get().order();
}
return typeOrder;
}

private static String toErrorString(List<String> theParameterTypes) {
return theParameterTypes
.stream()
.sorted()
.collect(Collectors.joining(","));
}

protected abstract static class BaseInvoker implements Comparable<BaseInvoker> {

private final int myOrder;
private final Object myInterceptor;

BaseInvoker(Object theInterceptor, int theOrder) {
myInterceptor = theInterceptor;
myOrder = theOrder;
}

public Object getInterceptor() {
return myInterceptor;
}

abstract Object invoke(HookParams theParams);

@Override
public int compareTo(BaseInvoker theInvoker) {
return myOrder - theInvoker.myOrder;
}
}

private static class HookInvoker extends BaseInvoker {
private class HookInvoker extends BaseInvoker {

private final Method myMethod;
private final Class<?>[] myParameterTypes;
private final int[] myParameterIndexes;
private final IPointcut myPointcut;
private final POINTCUT myPointcut;

/**
* Constructor

@@ -579,7 +491,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
.toString();
}

public IPointcut getPointcut() {
public POINTCUT getPointcut() {
return myPointcut;
}

@@ -624,17 +536,17 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

}

protected static class HookDescriptor {
protected class HookDescriptor {

private final IPointcut myPointcut;
private final POINTCUT myPointcut;
private final int myOrder;

public HookDescriptor(IPointcut thePointcut, int theOrder) {
public HookDescriptor(POINTCUT thePointcut, int theOrder) {
myPointcut = thePointcut;
myOrder = theOrder;
}

IPointcut getPointcut() {
POINTCUT getPointcut() {
return myPointcut;
}

@@ -644,4 +556,49 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme

}

protected abstract static class BaseInvoker implements Comparable<BaseInvoker> {

private final int myOrder;
private final Object myInterceptor;

BaseInvoker(Object theInterceptor, int theOrder) {
myInterceptor = theInterceptor;
myOrder = theOrder;
}

public Object getInterceptor() {
return myInterceptor;
}

abstract Object invoke(HookParams theParams);

@Override
public int compareTo(BaseInvoker theInvoker) {
return myOrder - theInvoker.myOrder;
}
}

protected static <T extends Annotation> Optional<T> findAnnotation(AnnotatedElement theObject, Class<T> theHookClass) {
T annotation;
if (theObject instanceof Method) {
annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
} else {
annotation = theObject.getAnnotation(theHookClass);
}
return Optional.ofNullable(annotation);
}

private static int determineOrder(Class<?> theInterceptorClass) {
return findAnnotation(theInterceptorClass, Interceptor.class)
.map(Interceptor::order)
.orElse(Interceptor.DEFAULT_ORDER);
}

private static String toErrorString(List<String> theParameterTypes) {
return theParameterTypes
.stream()
.sorted()
.collect(Collectors.joining(","));
}

}
@@ -48,7 +48,7 @@ public class InterceptorService extends BaseInterceptorService<Pointcut> impleme
* @param theName The name for this registry (useful for troubleshooting)
*/
public InterceptorService(String theName) {
super(theName);
super(Pointcut.class, theName);
}

@Override

@@ -72,7 +72,7 @@ public class InterceptorService extends BaseInterceptorService<Pointcut> impleme
}

private class AnonymousLambdaInvoker extends BaseInvoker {
private static class AnonymousLambdaInvoker extends BaseInvoker {
private final IAnonymousInterceptor myHook;
private final Pointcut myPointcut;
@@ -142,7 +142,7 @@ public final class HapiSystemProperties {
}

/**
* This property sets {@link DaoConfig#setStatusBasedReindexingDisabled(Boolean)} to true when the system starts up.
* This property sets {@link JpaStorageSettings#setStatusBasedReindexingDisabled(Boolean)} to true when the system starts up.
*/
public static void enableStatusBasedReindex() {
System.clearProperty(DISABLE_STATUS_BASED_REINDEX);
@@ -3,18 +3,16 @@ package ca.uhn.fhir.interceptor.executor;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.IPointcut;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.util.StopWatch;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

@@ -24,16 +22,14 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

//import org.junit.jupiter.api.Disabled;

public class InterceptorServiceTest {

private static final Logger ourLog = LoggerFactory.getLogger(InterceptorServiceTest.class);
private List<String> myInvocations = new ArrayList<>();
private final List<String> myInvocations = new ArrayList<>();

@Test
public void testInterceptorWithAnnotationDefinedOnInterface() {

@@ -203,8 +199,11 @@ public class InterceptorServiceTest {
// Registered in opposite order to verify that the order on the annotation is used
MyTestInterceptorTwo interceptor1 = new MyTestInterceptorTwo();
MyTestInterceptorOne interceptor0 = new MyTestInterceptorOne();
assertFalse(svc.hasHooks(Pointcut.TEST_RB));
svc.registerInterceptor(interceptor1);
assertTrue(svc.hasHooks(Pointcut.TEST_RB));
svc.registerInterceptor(interceptor0);
assertTrue(svc.hasHooks(Pointcut.TEST_RB));

// Register the manual interceptor (has Order right in the middle)
MyTestInterceptorManual myInterceptorManual = new MyTestInterceptorManual();

@@ -236,6 +235,12 @@ public class InterceptorServiceTest {
assertTrue(globalInterceptors.get(0) instanceof MyTestInterceptorOne, globalInterceptors.get(0).getClass().toString());
assertTrue(globalInterceptors.get(1) instanceof MyTestInterceptorTwo, globalInterceptors.get(1).getClass().toString());

// Unregister the two others
assertTrue(svc.hasHooks(Pointcut.TEST_RB));
svc.unregisterInterceptor(interceptor1);
assertTrue(svc.hasHooks(Pointcut.TEST_RB));
svc.unregisterInterceptor(interceptor0);
assertFalse(svc.hasHooks(Pointcut.TEST_RB));
}

@Test
@@ -248,8 +253,10 @@ public class InterceptorServiceTest {
svc.registerInterceptor(interceptor1);
svc.registerInterceptor(interceptor0);

boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
assertTrue(outcome);
if (svc.hasHooks(Pointcut.TEST_RB)) {
boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
assertTrue(outcome);
}

assertThat(myInvocations, contains("MyTestInterceptorOne.testRb", "MyTestInterceptorTwo.testRb"));
assertSame("A", interceptor0.myLastString0);

@@ -257,6 +264,26 @@ public class InterceptorServiceTest {
assertSame("B", interceptor1.myLastString1);
}

@Test
public void testInvokeAnonymousInterceptorMethods() {
InterceptorService svc = new InterceptorService();

MyTestAnonymousInterceptorOne interceptor0 = new MyTestAnonymousInterceptorOne();
MyTestAnonymousInterceptorTwo interceptor1 = new MyTestAnonymousInterceptorTwo();
svc.registerAnonymousInterceptor(Pointcut.TEST_RB, interceptor0);
svc.registerAnonymousInterceptor(Pointcut.TEST_RB, interceptor1);

if (svc.hasHooks(Pointcut.TEST_RB)) {
boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
assertTrue(outcome);
}

assertThat(myInvocations, contains("MyTestAnonymousInterceptorOne.testRb", "MyTestAnonymousInterceptorTwo.testRb"));
assertSame("A", interceptor0.myLastString0);
assertSame("A", interceptor1.myLastString0);
assertSame("B", interceptor1.myLastString1);
}
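Since IAnonymousInterceptor declares a single invoke(IPointcut, HookParams) method, the same registration shown in the test above can also be written as a lambda; an illustrative sketch (assumed usage, not part of this commit):

InterceptorService anonymousSvc = new InterceptorService();
anonymousSvc.registerAnonymousInterceptor(Pointcut.TEST_RB, (thePointcut, theArgs) -> {
	// receives the two String parameters declared for Pointcut.TEST_RB
	String first = theArgs.get(String.class, 0);
	ourLog.info("TEST_RB fired with {}", first);
});
anonymousSvc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));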
@Test
public void testInvokeUsingSupplierArg() {
InterceptorService svc = new InterceptorService();

@@ -320,8 +347,8 @@ public class InterceptorServiceTest {
.add(String.class, null)
.add(String.class, null);
svc.callHooks(Pointcut.TEST_RB, params);
assertEquals(null, interceptor.myValue0);
assertEquals(null, interceptor.myValue1);
assertNull(interceptor.myValue0);
assertNull(interceptor.myValue1);
svc.unregisterAllInterceptors();

// First null
@@ -331,7 +358,7 @@ public class InterceptorServiceTest {
.add(String.class, null)
.add(String.class, "A");
svc.callHooks(Pointcut.TEST_RB, params);
assertEquals(null, interceptor.myValue0);
assertNull(interceptor.myValue0);
assertEquals("A", interceptor.myValue1);
svc.unregisterAllInterceptors();

@@ -343,7 +370,7 @@ public class InterceptorServiceTest {
.add(String.class, null);
svc.callHooks(Pointcut.TEST_RB, params);
assertEquals("A", interceptor.myValue0);
assertEquals(null, interceptor.myValue1);
assertNull(interceptor.myValue1);
svc.unregisterAllInterceptors();

}

@@ -399,9 +426,9 @@ public class InterceptorServiceTest {
assertEquals("AAA", e.getMessage());
}

assertEquals(true, interceptor0.myHit);
assertEquals(true, interceptor1.myHit);
assertEquals(true, interceptor2.myHit);
assertTrue(interceptor0.myHit);
assertTrue(interceptor1.myHit);
assertTrue(interceptor2.myHit);
}

@@ -465,7 +492,7 @@ public class InterceptorServiceTest {
}
}

@SuppressWarnings("unchecked")
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testValidateParamTypesWrongParam() {
InterceptorService svc = new InterceptorService();

@@ -485,110 +512,6 @@ public class InterceptorServiceTest {
}
}

@Test
public void testThreadLocalHookInterceptor() {
InterceptorService svc = new InterceptorService();
svc.setThreadlocalInvokersEnabled(true);

HookParams params = new HookParams().add("A").add("B");

@Interceptor(order = 100)
class LocalInterceptor {

private int myCount = 0;

@Hook(Pointcut.TEST_RB)
public boolean testRb(String theString0, String theString1) {
myCount++;
return true;
}

}
LocalInterceptor interceptor = new LocalInterceptor();
svc.registerThreadLocalInterceptor(interceptor);
try {

svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
assertEquals(5, interceptor.myCount);

} finally {
svc.unregisterThreadLocalInterceptor(interceptor);
}

// Call some more - The interceptor is removed so the count shouldn't change
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
assertEquals(5, interceptor.myCount);

}

/**
* <pre>
* JA 20190321 On my MBP 2018
* ThreadLocalEnabled=true - Performed 500000 loops in 8383.0ms - 0.017ms / loop
* ThreadLocalEnabled=false - Performed 500000 loops in 3743.0ms - 0.007ms / loop
* ThreadLocalEnabled=true - Performed 500000 loops in 6163.0ms - 0.012ms / loop
* ThreadLocalEnabled=false - Performed 500000 loops in 3487.0ms - 0.007ms / loop
* ThreadLocalEnabled=true - Performed 1000000 loops in 00:00:12.458 - 0.012ms / loop
* ThreadLocalEnabled=false - Performed 1000000 loops in 7046.0ms - 0.007ms / loop
* </pre>
*/
@Test
@Disabled("Performance test - Not needed normally")
public void testThreadLocalHookInterceptorMicroBenchmark() {
threadLocalMicroBenchmark(true, 500000);
threadLocalMicroBenchmark(false, 500000);
threadLocalMicroBenchmark(true, 500000);
threadLocalMicroBenchmark(false, 500000);
threadLocalMicroBenchmark(true, 500000);
threadLocalMicroBenchmark(false, 500000);
}

private void threadLocalMicroBenchmark(boolean theThreadlocalInvokersEnabled, int theCount) {
InterceptorService svc = new InterceptorService();
svc.setThreadlocalInvokersEnabled(theThreadlocalInvokersEnabled);

HookParams params = new HookParams().add("A").add("B");

@Interceptor(order = 100)
class LocalInterceptor {

private int myCount = 0;

@Hook(Pointcut.TEST_RB)
public void testRb(String theString0, String theString1) {
myCount++;
}

}

LocalInterceptor interceptor = new LocalInterceptor();
StopWatch sw = new StopWatch();
for (int i = 0; i < theCount; i++) {

svc.registerThreadLocalInterceptor(interceptor);
try {
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
svc.callHooks(Pointcut.TEST_RB, params);
} finally {
svc.unregisterThreadLocalInterceptor(interceptor);
}

}

ourLog.info("ThreadLocalEnabled={} - Performed {} loops in {} - {} / loop - Outcomne: {}", theThreadlocalInvokersEnabled, theCount, sw.toString(), sw.formatMillisPerOperation(theCount), interceptor.myCount);
}

@BeforeEach
public void before() {
myInvocations.clear();

@@ -634,6 +557,27 @@ public class InterceptorServiceTest {
}
}

public class MyTestAnonymousInterceptorOne implements IAnonymousInterceptor {
private String myLastString0;
@Override
public void invoke(IPointcut thePointcut, HookParams theArgs) {
myLastString0 = theArgs.get(String.class, 0);
myInvocations.add("MyTestAnonymousInterceptorOne.testRb");
}
}

public class MyTestAnonymousInterceptorTwo implements IAnonymousInterceptor {
private String myLastString0;
private String myLastString1;

@Override
public void invoke(IPointcut thePointcut, HookParams theArgs) {
myLastString0 = theArgs.get(String.class, 0);
myLastString1 = theArgs.get(String.class, 1);
myInvocations.add("MyTestAnonymousInterceptorTwo.testRb");
}
}

@Interceptor(order = 200)
public class MyTestInterceptorManual {
@Hook(Pointcut.TEST_RB)

@@ -662,12 +606,6 @@ public class InterceptorServiceTest {
private static class CanonicalSubscription {
}

/**
* Just a make-believe version of this class for the unit test
*/
private static class ResourceDeliveryMessage {
}

@Interceptor()
public static class InterceptorThatFailsOnRegister {
@@ -4,14 +4,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>

<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.0-SNAPSHOT</version>
<version>6.5.2-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -186,6 +186,7 @@ public abstract class BaseApp {
commands.add(new ExportConceptMapToCsvCommand());
commands.add(new ImportCsvToConceptMapCommand());
commands.add(new HapiFlywayMigrateDatabaseCommand());
commands.add(new HapiClearMigrationLockCommand());
commands.add(new CreatePackageCommand());
commands.add(new BulkImportCommand());
commands.add(new ReindexTerminologyCommand());
@@ -0,0 +1,92 @@
package ca.uhn.fhir.cli;

/*-
* #%L
* HAPI FHIR - Command Line Client - API
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.HapiMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import java.util.Arrays;
import java.util.stream.Collectors;

/**
*
*/
public abstract class BaseClearMigrationLockCommand extends BaseCommand {

public static final String CLEAR_LOCK = "clear-migration-lock";
private String myMigrationTableName;

@Override
public String getCommandDescription() {
return "This command clears a database migration lock";
}

@Override
public String getCommandName() {
return CLEAR_LOCK;
}

@Override
public Options getOptions() {
Options retVal = new Options();
addRequiredOption(retVal, "u", "url", "URL", "The JDBC database URL");
addRequiredOption(retVal, "n", "username", "Username", "The JDBC database username");
addRequiredOption(retVal, "p", "password", "Password", "The JDBC database password");
addRequiredOption(retVal, "d", "driver", "Driver", "The database driver to use (Options are " + driverOptions() + ")");
addRequiredOption(retVal, "l", "lock-uuid", "Lock UUID", "The UUID value of the lock held in the database.");
return retVal;
}

private String driverOptions() {
return Arrays.stream(DriverTypeEnum.values()).map(Enum::name).collect(Collectors.joining(", "));
}

@Override
public void run(CommandLine theCommandLine) throws ParseException {

String url = theCommandLine.getOptionValue("u");
String username = theCommandLine.getOptionValue("n");
String password = theCommandLine.getOptionValue("p");
String lockUUID = theCommandLine.getOptionValue("l");
DriverTypeEnum driverType;
String driverTypeString = theCommandLine.getOptionValue("d");
try {
driverType = DriverTypeEnum.valueOf(driverTypeString);
} catch (Exception e) {
throw new ParseException(Msg.code(2774) + "Invalid driver type \"" + driverTypeString + "\". Valid values are: " + driverOptions());
}

DriverTypeEnum.ConnectionProperties connectionProperties = driverType.newConnectionProperties(url, username, password);
HapiMigrator migrator = new HapiMigrator(myMigrationTableName, connectionProperties.getDataSource(), driverType);
migrator.clearMigrationLockWithUUID(lockUUID);
}

protected void setMigrationTableName(String theMigrationTableName) {
myMigrationTableName = theMigrationTableName;
}
}

@@ -0,0 +1,33 @@
package ca.uhn.fhir.cli;

/*-
* #%L
* HAPI FHIR - Command Line Client - API
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.migrate.SchemaMigrator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;

public class HapiClearMigrationLockCommand extends BaseClearMigrationLockCommand {
@Override
public void run(CommandLine theCommandLine) throws ParseException {
setMigrationTableName(SchemaMigrator.HAPI_FHIR_MIGRATION_TABLENAME);
super.run(theCommandLine);
}
}
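An illustrative invocation sketch for the new command (the connection values below are hypothetical placeholders; the option letters come from getOptions() in BaseClearMigrationLockCommand above, and the command is registered with the CLI in BaseApp):

String[] clearLockArgs = new String[]{
	BaseClearMigrationLockCommand.CLEAR_LOCK, // "clear-migration-lock"
	"-d", "H2_EMBEDDED",
	"-u", "jdbc:h2:file:./target/example-db", // hypothetical JDBC URL
	"-n", "sa",                               // hypothetical username
	"-p", "sa",                               // hypothetical password
	"-l", "00000000-0000-0000-0000-000000000000" // UUID of the lock row to clear
};
App.main(clearLockArgs);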
@@ -21,7 +21,7 @@ package ca.uhn.fhir.cli;
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.demo.ContextHolder;
import ca.uhn.fhir.jpa.demo.FhirServerConfig;
import ca.uhn.fhir.jpa.demo.FhirServerConfigDstu3;

@@ -77,7 +77,7 @@ public class RunServerCommand extends BaseCommand {

addOptionalOption(options, "u", "url", "Url", "If this option is set, specifies the JDBC URL to use for the database connection");

Long defaultReuseSearchResults = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
Long defaultReuseSearchResults = JpaStorageSettings.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
String defaultReuseSearchResultsStr = defaultReuseSearchResults == null ? "off" : String.valueOf(defaultReuseSearchResults);
options.addOption(null, OPTION_REUSE_SEARCH_RESULTS_MILLIS, true, "The time in milliseconds within which the same results will be returned for multiple identical searches, or \"off\" (default is " + defaultReuseSearchResultsStr + ")");
return options;
@@ -1,31 +0,0 @@
package ca.uhn.fhir.cli;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

public class BaseAppTest {

private final PrintStream standardOut = System.out;
private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();

@BeforeEach
public void setUp() {
System.setOut(new PrintStream(outputStreamCaptor));
}

@AfterEach
public void tearDown() {
System.setOut(standardOut);
}

@Test
public void testHelpOption() {
App.main(new String[]{"help", "create-package"});
assertThat(outputStreamCaptor.toString().trim(), outputStreamCaptor.toString().trim(), containsString("Usage"));
}
}

@@ -89,7 +89,7 @@ public class BulkImportCommandIT {

private Batch2JobStartResponse createJobStartResponse(String theId) {
Batch2JobStartResponse response = new Batch2JobStartResponse();
response.setJobId(theId);
response.setInstanceId(theId);
return response;
}
@@ -0,0 +1,32 @@
package ca.uhn.fhir.cli;

import org.apache.commons.io.output.TeeOutputStream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

/**
* This class splits output stream to both STDOUT, and a capturing byte array output stream, which can later be inspected.
*/
public class ConsoleOutputCapturingBaseTest {

protected final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream();
protected final TeeOutputStream myTeeOutputStream = new TeeOutputStream(System.out, outputStreamCaptor);

@BeforeEach
public void setUp() {
System.setOut(new PrintStream(myTeeOutputStream));
}

@AfterEach
public void tearDown() {
outputStreamCaptor.reset();
System.setOut(System.out);
}

protected String getConsoleOutput() {
return outputStreamCaptor.toString().trim();
}

}
@ -0,0 +1,313 @@
|
||||
package ca.uhn.fhir.cli;
|
||||
|
||||
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
|
||||
import ca.uhn.fhir.jpa.migrate.dao.HapiMigrationDao;
|
||||
import ca.uhn.fhir.jpa.migrate.entity.HapiMigrationEntity;
|
||||
import ca.uhn.fhir.system.HapiSystemProperties;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.jdbc.core.JdbcTemplate;
|
||||
import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
|
||||
import org.springframework.jdbc.support.lob.DefaultLobHandler;
|
||||
import org.springframework.jdbc.support.lob.LobCreator;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Timestamp;
|
||||
import java.sql.Types;
|
||||
import java.util.*;
|
||||
|
||||
import static ca.uhn.fhir.jpa.migrate.HapiMigrationLock.LOCK_PID;
|
||||
import static ca.uhn.fhir.jpa.migrate.HapiMigrationStorageSvc.LOCK_TYPE;
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
|
||||
public class HapiClearMigrationLockCommandTest extends ConsoleOutputCapturingBaseTest {
|
||||
|
||||
private static final Logger ourLog = getLogger(HapiClearMigrationLockCommandTest.class);
|
||||
|
||||
public static final String DB_DIRECTORY = "target/h2_test";
|
||||
|
||||
static {
|
||||
HapiSystemProperties.enableTestMode();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testClearNonExistingLockIncorrectLock() throws IOException {
|
||||
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_incorrect_lock");
|
||||
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
|
||||
String correctLockUUID = UUID.randomUUID().toString();
|
||||
String incorrectLockUUID = UUID.randomUUID().toString();
|
||||
createAndSaveLockRow(correctLockUUID, dao);
|
||||
|
||||
String[] args = new String[]{
|
||||
BaseClearMigrationLockCommand.CLEAR_LOCK,
|
||||
"-d", "H2_EMBEDDED",
|
||||
"-u", connectionData.url,
|
||||
"-n", "",
|
||||
"-p", "",
|
||||
"-l", incorrectLockUUID
|
||||
};
|
||||
|
||||
int beforeClearMigrationCount = dao.findAll().size();
|
||||
try {
|
||||
App.main(args);
|
||||
fail();
|
||||
} catch (CommandFailureException e) {
|
||||
assertThat(e.getMessage(), containsString("HAPI-2152: Internal error: on unlocking, a competing lock was found"));
|
||||
}
|
||||
}
|
||||
@Test
|
||||
public void testClearNonExistingLockNoLocks() throws IOException {
|
||||
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_nonexisting_lock");
|
||||
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
|
||||
String lockUUID = UUID.randomUUID().toString();
|
||||
|
||||
String[] args = new String[]{
|
||||
BaseClearMigrationLockCommand.CLEAR_LOCK,
|
||||
"-d", "H2_EMBEDDED",
|
||||
"-u", connectionData.url,
|
||||
"-n", "",
|
||||
"-p", "",
|
||||
"-l", lockUUID
|
||||
};
|
||||
|
||||
int beforeClearMigrationCount = dao.findAll().size();
|
||||
App.main(args);
|
||||
int afterClearMigrationCount = dao.findAll().size();
|
||||
int removedRows = beforeClearMigrationCount - afterClearMigrationCount;
|
||||
assertEquals(0, removedRows);
|
||||
assertThat(getConsoleOutput(), containsString("Did not successfully remove lock entry. [uuid="+ lockUUID +"]"));
|
||||
}
|
||||
@Test
|
||||
public void testMigrateAndClearExistingLock() throws IOException, SQLException {
|
||||
ConnectionData connectionData = createSchemaAndMigrate("test_migrate_clear_existing_lock");
|
||||
HapiMigrationDao dao = new HapiMigrationDao(connectionData.connectionProperties.getDataSource(), DriverTypeEnum.H2_EMBEDDED, "FLY_HFJ_MIGRATION");
|
||||
String lockUUID = UUID.randomUUID().toString();
|
||||
createAndSaveLockRow(lockUUID, dao);
|
||||
|
||||
|
||||
String[] args = new String[]{
|
||||
BaseClearMigrationLockCommand.CLEAR_LOCK,
|
||||
"-d", "H2_EMBEDDED",
|
||||
"-u", connectionData.url,
|
||||
"-n", "",
|
||||
"-p", "",
|
||||
"-l", lockUUID
|
||||
};
|
||||
int beforeClearMigrationCount = dao.findAll().size();
|
||||
App.main(args);
|
||||
int afterClearMigrationCount = dao.findAll().size();
|
||||
int removedRows = beforeClearMigrationCount - afterClearMigrationCount;
|
||||
|
||||
assertEquals(1, removedRows);
|
||||
assertThat(getConsoleOutput(), containsString("Successfully removed lock entry. [uuid="+ lockUUID +"]"));
|
||||
}
|
||||
|
||||
private record ConnectionData(DriverTypeEnum.ConnectionProperties connectionProperties, String url) {}
|
||||
public ConnectionData createSchemaAndMigrate(String theDbName) throws IOException {
|
||||
|
||||
File location = getLocation(theDbName);
|
||||
|
||||
String url = "jdbc:h2:" + location.getAbsolutePath();
|
||||
DriverTypeEnum.ConnectionProperties connectionProperties = DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "", "");
|
||||
String initSql = "/persistence_create_h2_340.sql";
|
||||
executeSqlStatements(connectionProperties, initSql);
|
||||
|
||||
seedDatabase340(connectionProperties);
|
||||
|
||||
ourLog.info("**********************************************");
|
||||
ourLog.info("Done Setup, Starting Migration...");
|
||||
ourLog.info("**********************************************");
|
||||
|
||||
String[] args = new String[]{
|
||||
BaseFlywayMigrateDatabaseCommand.MIGRATE_DATABASE,
|
||||
"-d", "H2_EMBEDDED",
|
||||
"-u", url,
|
||||
"-n", "",
|
||||
"-p", "",
|
||||
"-r"
|
||||
};
|
||||
App.main(args);
|
||||
return new ConnectionData(connectionProperties, url);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
private static void createAndSaveLockRow(String theLockUUID, HapiMigrationDao theDao) {
|
||||
HapiMigrationEntity me = new HapiMigrationEntity();
|
||||
me.setPid(LOCK_PID);
|
||||
me.setChecksum(100);
|
||||
me.setDescription(theLockUUID);
|
||||
me.setSuccess(true);
|
||||
me.setExecutionTime(20);
|
||||
me.setInstalledBy("gary");
|
||||
me.setInstalledOn(new Date());
|
||||
me.setVersion("2023.1");
|
||||
me.setType(LOCK_TYPE);
|
||||
theDao.save(me);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private File getLocation(String theDatabaseName) throws IOException {
|
||||
File directory = new File(DB_DIRECTORY);
|
||||
if (directory.exists()) {
|
||||
FileUtils.deleteDirectory(directory);
|
||||
}
|
||||
|
||||
return new File(DB_DIRECTORY + "/" + theDatabaseName);
|
||||
}
|
||||
|
||||
private void seedDatabase340(DriverTypeEnum.ConnectionProperties theConnectionProperties) {
|
||||
theConnectionProperties.getTxTemplate().execute(t -> {
|
||||
JdbcTemplate jdbcTemplate = theConnectionProperties.newJdbcTemplate();
|
||||
|
||||
jdbcTemplate.execute(
|
||||
"insert into HFJ_RESOURCE (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, SP_HAS_LINKS, HASH_SHA256, SP_INDEX_STATUS, RES_LANGUAGE, SP_CMPSTR_UNIQ_PRESENT, SP_COORDS_PRESENT, SP_DATE_PRESENT, SP_NUMBER_PRESENT, SP_QUANTITY_PRESENT, SP_STRING_PRESENT, SP_TOKEN_PRESENT, SP_URI_PRESENT, RES_PROFILE, RES_TYPE, RES_VER, RES_ID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
|
||||
@Override
|
||||
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
|
||||
thePs.setNull(1, Types.TIMESTAMP);
|
||||
thePs.setString(2, "R4");
|
||||
thePs.setNull(3, Types.BIGINT);
|
||||
thePs.setBoolean(4, false);
|
||||
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setBoolean(7, false);
|
||||
thePs.setNull(8, Types.VARCHAR);
|
||||
thePs.setLong(9, 1L);
|
||||
thePs.setNull(10, Types.VARCHAR);
|
||||
thePs.setBoolean(11, false);
|
||||
thePs.setBoolean(12, false);
|
||||
thePs.setBoolean(13, false);
|
||||
thePs.setBoolean(14, false);
|
||||
thePs.setBoolean(15, false);
|
||||
thePs.setBoolean(16, false);
|
||||
thePs.setBoolean(17, false);
|
||||
thePs.setBoolean(18, false);
|
||||
thePs.setNull(19, Types.VARCHAR);
|
||||
thePs.setString(20, "Patient");
|
||||
thePs.setLong(21, 1L);
|
||||
thePs.setLong(22, 1L);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
jdbcTemplate.execute(
|
||||
"insert into HFJ_RES_VER (RES_DELETED_AT, RES_VERSION, FORCED_ID_PID, HAS_TAGS, RES_PUBLISHED, RES_UPDATED, RES_ENCODING, RES_TEXT, RES_ID, RES_TYPE, RES_VER, PID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
|
||||
@Override
|
||||
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
|
||||
thePs.setNull(1, Types.TIMESTAMP);
|
||||
thePs.setString(2, "R4");
|
||||
thePs.setNull(3, Types.BIGINT);
|
||||
thePs.setBoolean(4, false);
|
||||
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setString(7, "JSON");
|
||||
theLobCreator.setBlobAsBytes(thePs, 8, "{\"resourceType\":\"Patient\"}".getBytes(Charsets.US_ASCII));
|
||||
thePs.setLong(9, 1L);
|
||||
thePs.setString(10, "Patient");
|
||||
thePs.setLong(11, 1L);
|
||||
thePs.setLong(12, 1L);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
jdbcTemplate.execute(
|
||||
"insert into HFJ_SPIDX_STRING (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_EXACT, SP_VALUE_NORMALIZED, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
|
||||
@Override
|
||||
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
|
||||
thePs.setBoolean(1, false);
|
||||
thePs.setString(2, "given");
|
||||
thePs.setLong(3, 1L); // res-id
|
||||
thePs.setString(4, "Patient");
|
||||
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setString(6, "ROBERT");
|
||||
thePs.setString(7, "Robert");
|
||||
thePs.setLong(8, 1L);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
jdbcTemplate.execute(
|
||||
"insert into HFJ_SPIDX_TOKEN (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_SYSTEM, SP_VALUE, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
|
||||
@Override
|
||||
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
|
||||
thePs.setBoolean(1, false);
|
||||
thePs.setString(2, "identifier");
|
||||
thePs.setLong(3, 1L); // res-id
|
||||
thePs.setString(4, "Patient");
|
||||
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setString(6, "http://foo");
|
||||
thePs.setString(7, "12345678");
|
||||
thePs.setLong(8, 1L);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
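// Seed a date search index row (HFJ_SPIDX_DATE) for the Patient's "birthdate"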
jdbcTemplate.execute(
|
||||
"insert into HFJ_SPIDX_DATE (SP_MISSING, SP_NAME, RES_ID, RES_TYPE, SP_UPDATED, SP_VALUE_HIGH, SP_VALUE_LOW, SP_ID) values (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
new AbstractLobCreatingPreparedStatementCallback(new DefaultLobHandler()) {
|
||||
@Override
|
||||
protected void setValues(PreparedStatement thePs, LobCreator theLobCreator) throws SQLException {
|
||||
thePs.setBoolean(1, false);
|
||||
thePs.setString(2, "birthdate");
|
||||
thePs.setLong(3, 1L); // res-id
|
||||
thePs.setString(4, "Patient");
|
||||
thePs.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
|
||||
thePs.setTimestamp(6, new Timestamp(1000000000L)); // value high
|
||||
thePs.setTimestamp(7, new Timestamp(1000000000L)); // value low
|
||||
thePs.setLong(8, 1L);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
private void executeSqlStatements(DriverTypeEnum.ConnectionProperties theConnectionProperties, String theInitSql) throws
|
||||
IOException {
|
||||
String script = IOUtils.toString(HapiClearMigrationLockCommandTest.class.getResourceAsStream(theInitSql), Charsets.UTF_8);
|
||||
List<String> scriptStatements = new ArrayList<>(Arrays.asList(script.split("\n")));
|
||||
for (int i = 0; i < scriptStatements.size(); i++) {
|
||||
String nextStatement = scriptStatements.get(i);
|
||||
if (isBlank(nextStatement)) {
|
||||
scriptStatements.remove(i);
|
||||
i--;
|
||||
continue;
|
||||
}
|
||||
|
||||
nextStatement = nextStatement.trim();
|
||||
while (nextStatement.endsWith(";")) {
|
||||
nextStatement = nextStatement.substring(0, nextStatement.length() - 1);
|
||||
}
|
||||
scriptStatements.set(i, nextStatement);
|
||||
}
|
||||
|
||||
theConnectionProperties.getTxTemplate().execute(t -> {
|
||||
for (String next : scriptStatements) {
|
||||
theConnectionProperties.newJdbcTemplate().execute(next);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,13 @@
|
||||
package ca.uhn.fhir.cli;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
||||
public class HelpOptionTest extends ConsoleOutputCapturingBaseTest {
|
||||
@Test
|
||||
public void testHelpOption() {
|
||||
App.main(new String[]{"help", "create-package"});
|
||||
assertThat(outputStreamCaptor.toString().trim(), containsString("Usage"));
|
||||
}
|
||||
}
|
@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-cli</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -6,7 +6,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../../hapi-deployable-pom</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.demo;
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import org.apache.commons.dbcp2.BasicDataSource;
|
||||
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
@ -39,20 +38,15 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
public class CommonConfig {
|
||||
|
||||
/**
|
||||
* Configure FHIR properties around the the JPA server via this bean
|
||||
* Configure FHIR properties around the JPA server via this bean
|
||||
*/
|
||||
@Bean
|
||||
public DaoConfig daoConfig() {
|
||||
DaoConfig retVal = new DaoConfig();
|
||||
public JpaStorageSettings storageSettings() {
|
||||
JpaStorageSettings retVal = new JpaStorageSettings();
|
||||
retVal.setAllowMultipleDelete(true);
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public ModelConfig modelConfig() {
|
||||
return daoConfig().getModelConfig();
|
||||
}
|
||||
|
||||
/**
|
||||
* The following bean configures the database connection. The 'url' property value of "jdbc:h2:file:target./jpaserver_h2_files" indicates that the server should save resources in a
|
||||
* directory called "jpaserver_h2_files".
|
||||
|
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.demo;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
||||
@ -36,7 +36,7 @@ public class ContextHolder {
|
||||
private static String ourDatabaseUrl;
|
||||
|
||||
static {
|
||||
ourReuseSearchResultsMillis = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
|
||||
ourReuseSearchResultsMillis = JpaStorageSettings.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
|
||||
}
|
||||
|
||||
public static FhirContext getCtx() {
|
||||
|
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.demo;
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.config.HapiJpaConfig;
|
||||
import ca.uhn.fhir.jpa.config.JpaDstu2Config;
|
||||
import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;
|
||||
@ -56,11 +56,11 @@ public class FhirServerConfig {
|
||||
private Properties myJpaProperties;
|
||||
|
||||
/**
|
||||
* Configure FHIR properties around the the JPA server via this bean
|
||||
* Configure FHIR properties around the JPA server via this bean
|
||||
*/
|
||||
@Bean
|
||||
public DaoConfig daoConfig() {
|
||||
DaoConfig retVal = new DaoConfig();
|
||||
public JpaStorageSettings storageSettings() {
|
||||
JpaStorageSettings retVal = new JpaStorageSettings();
|
||||
retVal.setAllowMultipleDelete(true);
|
||||
return retVal;
|
||||
}
|
||||
|
@ -25,7 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
|
||||
@ -120,14 +120,14 @@ public class JpaServerDemo extends RestfulServer {
|
||||
if (fhirVersion == FhirVersionEnum.DSTU2) {
|
||||
IFhirSystemDao<Bundle, MetaDt> systemDao = myAppCtx.getBean("mySystemDaoDstu2", IFhirSystemDao.class);
|
||||
JpaConformanceProviderDstu2 confProvider = new JpaConformanceProviderDstu2(this, systemDao,
|
||||
myAppCtx.getBean(DaoConfig.class));
|
||||
myAppCtx.getBean(JpaStorageSettings.class));
|
||||
confProvider.setImplementationDescription("Example Server");
|
||||
setServerConformanceProvider(confProvider);
|
||||
} else if (fhirVersion == FhirVersionEnum.DSTU3) {
|
||||
IFhirSystemDao<org.hl7.fhir.dstu3.model.Bundle, org.hl7.fhir.dstu3.model.Meta> systemDao = myAppCtx
|
||||
.getBean("mySystemDaoDstu3", IFhirSystemDao.class);
|
||||
JpaConformanceProviderDstu3 confProvider = new JpaConformanceProviderDstu3(this, systemDao,
|
||||
myAppCtx.getBean(DaoConfig.class), myAppCtx.getBean(ISearchParamRegistry.class));
|
||||
myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class));
|
||||
confProvider.setImplementationDescription("Example Server");
|
||||
setServerConformanceProvider(confProvider);
|
||||
} else if (fhirVersion == FhirVersionEnum.R4) {
|
||||
@ -135,7 +135,7 @@ public class JpaServerDemo extends RestfulServer {
|
||||
.getBean("mySystemDaoR4", IFhirSystemDao.class);
|
||||
IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class);
|
||||
JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao,
|
||||
myAppCtx.getBean(DaoConfig.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport);
|
||||
myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport);
|
||||
confProvider.setImplementationDescription("Example Server");
|
||||
setServerConformanceProvider(confProvider);
|
||||
} else {
|
||||
@ -168,11 +168,11 @@ public class JpaServerDemo extends RestfulServer {
|
||||
CorsInterceptor corsInterceptor = new CorsInterceptor();
|
||||
registerInterceptor(corsInterceptor);
|
||||
|
||||
DaoConfig daoConfig = myAppCtx.getBean(DaoConfig.class);
|
||||
daoConfig.setAllowExternalReferences(ContextHolder.isAllowExternalRefs());
|
||||
daoConfig.setEnforceReferentialIntegrityOnDelete(!ContextHolder.isDisableReferentialIntegrity());
|
||||
daoConfig.setEnforceReferentialIntegrityOnWrite(!ContextHolder.isDisableReferentialIntegrity());
|
||||
daoConfig.setReuseCachedSearchResultsForMillis(ContextHolder.getReuseCachedSearchResultsForMillis());
|
||||
JpaStorageSettings storageSettings = myAppCtx.getBean(JpaStorageSettings.class);
|
||||
storageSettings.setAllowExternalReferences(ContextHolder.isAllowExternalRefs());
|
||||
storageSettings.setEnforceReferentialIntegrityOnDelete(!ContextHolder.isDisableReferentialIntegrity());
|
||||
storageSettings.setEnforceReferentialIntegrityOnWrite(!ContextHolder.isDisableReferentialIntegrity());
|
||||
storageSettings.setReuseCachedSearchResultsForMillis(ContextHolder.getReuseCachedSearchResultsForMillis());
|
||||
|
||||
DaoRegistry daoRegistry = myAppCtx.getBean(DaoRegistry.class);
|
||||
IInterceptorBroadcaster interceptorBroadcaster = myAppCtx.getBean(IInterceptorBroadcaster.class);
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -29,7 +29,6 @@ import ca.uhn.fhir.context.IRuntimeDatatypeDefinition;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.model.api.IResource;
|
||||
import ca.uhn.fhir.model.api.Include;
|
||||
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
|
||||
import ca.uhn.fhir.model.base.resource.BaseOperationOutcome;
|
||||
@ -136,7 +135,6 @@ import com.google.common.base.Charsets;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IAnyResource;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IBaseConformance;
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -42,7 +42,7 @@ FhirContext ctx = FhirContext.forDstu2();
|
||||
ctx.setNarrativeGenerator(new DefaultThymeleafNarrativeGenerator());
|
||||
|
||||
// Encode the output, including the narrative
|
||||
String output = ctx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient);
|
||||
String output = ctx.newXmlParser().setPrettyPrint(true).encodeResourceToString(patient);
|
||||
System.out.println(output);
|
||||
//END SNIPPET: example1
|
||||
|
||||
|
@ -21,6 +21,7 @@ package ca.uhn.hapi.fhir.docs;
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Patient;
|
||||
import ca.uhn.fhir.narrative.CustomThymeleafNarrativeGenerator;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
@ -33,7 +34,11 @@ FhirContext ctx = FhirContext.forDstu2();
|
||||
String propFile = "classpath:/com/foo/customnarrative.properties";
|
||||
CustomThymeleafNarrativeGenerator gen = new CustomThymeleafNarrativeGenerator(propFile);
|
||||
|
||||
Patient patient = new Patient();
|
||||
|
||||
ctx.setNarrativeGenerator(gen);
|
||||
String output = ctx.newJsonParser().encodeResourceToString(patient);
|
||||
System.out.println(output);
|
||||
//END SNIPPET: gen
|
||||
|
||||
|
||||
|
@ -0,0 +1,4 @@
|
||||
type: fix
|
||||
issue: 3482
|
||||
jira: SMILE-5076
|
||||
title: "Previously, persistence modules were attempting to activate subscriptions that used channel types they did not support. This has been changed, and those subscriptions will not be activated if the given channel type is not supported"
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4090
|
||||
title: "Previously, mdm links that connected resources and golden resources that were newly created had a link score of null,
|
||||
This changes it to 1.0, as the golden resource should be a perfect match with the source resource it was created from."
|
@ -0,0 +1,7 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4441
|
||||
title: "Creating a resource with an invalid embedded resource reference
|
||||
would not fail, even if IsEnforceReferentialIntegrityOnWrite was enabled.
|
||||
This has been fixed, and invalid references will throw.
|
||||
"
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: add
|
||||
issue: 4463
|
||||
jira: SMILE-4770
|
||||
title: "Providing the capability to specify that the name of the subscription matching channel should be unqualified at creation time."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4475
|
||||
jira: SMILE-4961
|
||||
title: "Enabling mass ingestion mode alters the resource deletion process leaving resources partially deleted. The problem has been fixed."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4481
|
||||
jira: SMILE-4961
|
||||
title: "Previously, a reindex batch job would fail when executing on deleted resources. This issue has been fixed."
|
@ -0,0 +1,5 @@
|
||||
type: fix
|
||||
issue: 4485
|
||||
jira: SMILE-5561
|
||||
title: "Cross-partition subscription PUT with a custom interceptor will fail on validation because the read partition ID is used.
|
||||
This has been fixed by skipping validation if the validator is invoked during an update operation."
|
@ -0,0 +1,3 @@
|
||||
type: fix
|
||||
issue: 4486
|
||||
title: "Previously, some MDM links of type `POSSIBLE_MATCH` were saved with unnormalized score values. This has been fixed."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4491
|
||||
title: "Batch2 Jobs in the FINALIZE state can now be
|
||||
cancelled."
|
@ -0,0 +1,8 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4491
|
||||
title: "Moved batch2 reduction step logic to the messaging queue.
|
||||
Before it was executed during the maintenance run directly.
|
||||
This resulted in bugs with multiple reduction steps kicking
|
||||
off for long running jobs.
|
||||
"
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4520
|
||||
jira: SMILE-4406
|
||||
title: "Updating documentation related to narrative generation."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4500
|
||||
jira: SMILE-6001
|
||||
title: "Schedule bulk export job and binary was not working with relational databases. This has now been fixed with a reimplementation for batch 2."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4508
|
||||
title: "Deleting CodeSystem resources by URL then expunging would fail to
|
||||
expunge and a foreign key error would be thrown. This has been fixed."
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4511
|
||||
jira: SMILE-6064
|
||||
title: "Previously, bulk export jobs were getting stuck in the `FINALIZE` state when performed
|
||||
with many resources and a low Bulk Export File Maximum Capacity. This has been fixed."
|
@ -0,0 +1,4 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4516
|
||||
title: "A new command has been added to the HAPI-FHIR CLI called `clear-migration-lock`. This can be used to fix a database state which can occur if a migration is interrupted before completing."
|
@ -0,0 +1,7 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4526
|
||||
title: "Fixing an issue where a long running reduction step causes
|
||||
the message not to be processed fast enough, thereby allowing
|
||||
multiple reduction step jobs to start.
|
||||
"
|
@ -0,0 +1,7 @@
|
||||
---
|
||||
type: fix
|
||||
issue: 4533
|
||||
jira: SMILE-5554
|
||||
title: "Previously, if a message with a null header was sent to a Channel Import module and failed,
|
||||
a NullPointerException would occur and the consumer would become unable to receive any further messages.
|
||||
This has now been fixed."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: change
|
||||
issue: 4537
|
||||
title: "ResourceDeliveryMessage no longer includes the payload in toString().
|
||||
This avoids leaking sensitive data to logs and other channels."
|
@ -0,0 +1,5 @@
|
||||
---
|
||||
type: perf
|
||||
issue: 4545
|
||||
title: "The InterceptorService now maintains an EnumSet of all registered interceptor Pointcuts,
|
||||
which should improve performance when testing for the existence of specific pointcuts."
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
type: change
|
||||
issue: 4545
|
||||
title: "The settings beans for the JPA server have been renamed to better reflect their purpose. Specifically
|
||||
the `ModelConfig` bean has been renamed to `StorageSettings` and the `DaoConfig` bean has been
|
||||
renamed to `JpaStorageSettings`."
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
type: remove
|
||||
issue: 4545
|
||||
title: "The InterceptorService no longer supports ThreadLocal interceptor registrations. This
|
||||
feature was deprecated in 6.2.0 due to lack of use and has never been enabled by default. Please
|
||||
let us know on the mailing list if this affects you."
|
@ -0,0 +1,19 @@
|
||||
# CQL
|
||||
|
||||
## Introduction
|
||||
|
||||
Clinical Quality Language (CQL) is a high-level, domain-specific language focused on clinical quality and targeted at measure and decision support artifact authors. HAPI embeds a [CQL engine](https://github.com/cqframework/clinical_quality_language) allowing the evaluation of clinical knowledge artifacts that use CQL to describe their logic.
|
||||
|
||||
A more detailed description of CQL is available at the [CQL Specification Implementation Guide](https://cql.hl7.org/).
|
||||
|
||||
The FHIR [Clinical Reasoning module](http://www.hl7.org/fhir/clinicalreasoning-module.html) defines a set of resources, profiles, operations, etc. that can be used to work with clinical knowledge within FHIR. HAPI provides implementations for some of those operations, described in more detail below.
|
||||
|
||||
## Working Example
|
||||
|
||||
A complete working example of HAPI CQL can be found in the [JPA Server Starter](/hapi-fhir/docs/server_jpa/get_started.html) project. You may wish to browse its source to see how it is set up.
|
||||
|
||||
## Clinical Reasoning Operations
|
||||
|
||||
HAPI provides implementations for some operations using CQL in DSTU3 and R4:
|
||||
|
||||
[Measure Operations](/hapi-fhir/docs/clinical_reasoning/measures.html)
|
@ -0,0 +1,387 @@
|
||||
# Measures
|
||||
|
||||
## Introduction
|
||||
|
||||
The FHIR Clinical Reasoning Module defines the [Measure resource](https://www.hl7.org/fhir/measure.html) and several [associated operations](https://www.hl7.org/fhir/measure-operations.html). The Measure Resource represents a structured, computable definition of a health-related measure such as a clinical quality measure, public health indicator, or population analytics measure. These Measures can then be used for reporting, analytics, and data-exchange purposes.
|
||||
|
||||
Electronic Clinical Quality Measures (eCQMs) in FHIR are represented as a FHIR Measure resource containing metadata and terminology, a population criteria section, and at least one FHIR Library resource containing a data criteria section as well as the logic used to define the population criteria. The population criteria section typically contains initial population criteria, denominator criteria, and numerator criteria subcomponents, among others. This is elaborated upon in greater detail in the [CQF Measures IG](http://hl7.org/fhir/us/cqfmeasures). An example of an eCQM as defined in FHIR looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType" : "Measure",
|
||||
"library" : [
|
||||
"http://hl7.org/fhir/us/cqfmeasures/Library/EXMLogic"
|
||||
],
|
||||
"group" : [
|
||||
{
|
||||
"population" : [
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "initial-population"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql-identifier",
|
||||
"expression" : "Initial Population"
|
||||
}
|
||||
},
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "numerator"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql-identifier",
|
||||
"expression" : "Numerator"
|
||||
}
|
||||
},
|
||||
{
|
||||
"code" : {
|
||||
"coding" : [
|
||||
{
|
||||
"code" : "denominator"
|
||||
}
|
||||
]
|
||||
},
|
||||
"criteria" : {
|
||||
"language" : "text/cql-identifier",
|
||||
"expression" : "Denominator"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Measures are then scored according to whether a subject (or subjects) are members of the various populations.
|
||||
|
||||
For example, a Measure for Breast Cancer screening might define an Initial Population (via CQL expressions) of "all women", a Denominator of "women over 35", and a Numerator of "women over 35 who have had breast cancer screenings in the past year". If the Measure is evaluated against a population of 100 women, 50 are over 35, and of those 25 have had breast cancer screenings in the past year, the final score would be 50%<sup>1</sup> (total number in numerator / total number in the denominator).
|
||||
|
||||
1. There are several methods for scoring Measures; this is meant only as an example.
|
||||
|
||||
## Operations
|
||||
|
||||
HAPI implements the [$evaluate-measure](https://www.hl7.org/fhir/operation-measure-evaluate-measure.html) operation. Support for additional operations is planned.
|
||||
|
||||
## Evaluate Measure
|
||||
|
||||
The `$evaluate-measure` operation is used to execute a Measure as specified by the relevant FHIR Resources against a subject or set of subjects. This implementation currently focuses primarily on supporting the narrower evaluation requirements defined by the [CQF Measures IG](http://hl7.org/fhir/us/cqfmeasures). Some support for extensions defined by other IGs is included as well, and the implementation aims to support a wider range of functionality in the future.
|
||||
|
||||
### Example Measure
|
||||
|
||||
Several example Measures are available in the [ecqm-content-r4](https://github.com/cqframework/ecqm-content-r4) IG. Full Bundles with all the required supporting resources are available [here](https://github.com/cqframework/ecqm-content-r4/tree/master/bundles/measure). You can download a Bundle and load it on your server as a transaction:
|
||||
|
||||
```bash
|
||||
POST http://your-server-base/fhir BreastCancerScreeningFHIR-bundle.json
|
||||
```
|
||||
|
||||
These Bundles also include example Patient clinical data so once posted Measure evaluation can be invoked with:
|
||||
|
||||
```bash
|
||||
GET http://your-server-base/fhir/Measure/BreastCancerScreeningFHIR/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31&subject=numerator&reportType=subject
|
||||
```
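
The same request can also be made from Java using the HAPI generic client. This is only an illustrative sketch: the base URL, Measure id, and parameter values are placeholders taken from the example above, and the model classes are assumed to come from `org.hl7.fhir.r4.model`.

```java
FhirContext ctx = FhirContext.forR4();
IGenericClient client = ctx.newRestfulGenericClient("http://your-server-base/fhir");

// Build the input parameters for $evaluate-measure
Parameters inParams = new Parameters();
inParams.addParameter().setName("periodStart").setValue(new DateType("2019-01-01"));
inParams.addParameter().setName("periodEnd").setValue(new DateType("2019-12-31"));
inParams.addParameter().setName("subject").setValue(new StringType("numerator"));
inParams.addParameter().setName("reportType").setValue(new StringType("subject"));

// Invoke the operation on the Measure instance using HTTP GET
Parameters outParams = client
   .operation()
   .onInstance(new IdType("Measure", "BreastCancerScreeningFHIR"))
   .named("$evaluate-measure")
   .withParameters(inParams)
   .useHttpGet()
   .execute();

// The resulting MeasureReport is returned as the first response parameter
MeasureReport report = (MeasureReport) outParams.getParameterFirstRep().getResource();
```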
|
||||
|
||||
### Measure Features
|
||||
|
||||
The FHIR Measure specification defines several types of Measures and various parameters for controlling the Measure evaluation. This section describes the features supported by HAPI.
|
||||
|
||||
#### Reporting Period
|
||||
|
||||
The `periodStart` and `periodEnd` parameters are used to control the Reporting Period for which a report is generated. This corresponds to `Measurement Period` defined in the CQL logic, as defined by the conformance requirements in the CQF Measures IG. Both `periodStart` and `periodEnd` must be used or neither must be used.
|
||||
|
||||
If neither is used, the default reporting period specified in the CQL logic is used, as shown here:
|
||||
|
||||
```cql
|
||||
parameter "Measurement Period" Interval<DateTime>
|
||||
default Interval[@2019-01-01T00:00:00.0, @2020-01-01T00:00:00.0)
|
||||
```
|
||||
|
||||
If neither is used and there is no default reporting period in the CQL logic, an error is thrown.
|
||||
|
||||
A request using `periodStart` and `periodEnd` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?periodStart=2019-01-01&periodEnd=2019-12-31
|
||||
```
|
||||
|
||||
`periodStart` and `periodEnd` support Dates (YYYY, YYYY-MM, or YYYY-MM-DD) and DateTimes (YYYY-MM-DDThh:mm:ss+zz:zz)
|
||||
|
||||
#### Report Types
|
||||
|
||||
Measure report types determine what data is returned from the evaluation. This is controlled with the `reportType` parameter on the $evaluate-measure Operation
|
||||
|
||||
| Report Type | Supported | Description |
|
||||
|--------------|:------------------:|----------------------------------------------------------------------------------------------------------------|
|
||||
| subject | :white_check_mark: | Measure report for a single subject (e.g. one patient). Includes additional detail, such as evaluatedResources |
|
||||
| subject-list | :white_check_mark: | Measure report including the list of subjects in each population (e.g. all the patients in the "numerator") |
|
||||
| population | :white_check_mark: | Summary measure report for a population |
|
||||
|
||||
NOTE: There's an open issue on the FHIR specification to align these names to the MeasureReportType value set.
|
||||
|
||||
A request using `reportType` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?reportType=subject-list
|
||||
```
|
||||
|
||||
#### Subject Types
|
||||
|
||||
The subject of a measure evaluation is controlled with the `subject` (R4+) and `patient` (DSTU3) operation parameters. Currently, the only subject type supported by HAPI is Patient. This means that all Measure evaluation and reporting happens with respect to a Patient or set of Patient resources.
|
||||
|
||||
| Subject Type | Supported | Description |
|
||||
|-------------------|:--------------------:|-------------------|
|
||||
| Patient | :white_check_mark: | A Patient |
|
||||
| Practitioner | :white_large_square: | A Practitioner |
|
||||
| Organization | :white_large_square: | An Organization |
|
||||
| Location | :white_large_square: | A Location |
|
||||
| Device | :white_large_square: | A Device |
|
||||
| Group<sup>1</sup> | :white_check_mark: | A set of subjects |
|
||||
|
||||
1. See next section
|
||||
|
||||
A request using `subject` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?subject=Patient/123
|
||||
```
|
||||
|
||||
##### Selecting a set of Patients
|
||||
|
||||
The set of Patients used for Measure evaluation is controlled with the `subject` (R4+) or `patient` (DSTU3), and `practitioner` parameters. The two parameters are mutually exclusive.
|
||||
|
||||
| Parameter | Supported | Description |
|
||||
|-------------------------------------------------------|:------------------:|-------------------------------------------------------------------------|
|
||||
| Not specified | :white_check_mark: | All Patients on the server |
|
||||
| `subject=XXX` or `subject=Patient/XXX` | :white_check_mark: | A single Patient |
|
||||
| `practitioner=XXX` or `practitioner=Practitioner/XXX` | :white_check_mark: | All Patients whose `generalPractitioner` is the referenced Practitioner |
|
||||
| `subject=Group/XXX`<sup>1</sup> | :white_check_mark: | A Group containing subjects |
|
||||
| `subject=XXX` AND `practitioner=XXX` | :x: | Not a valid combination |
|
||||
|
||||
1. Currently only Groups containing Patient resources are supported
|
||||
|
||||
A request using `practitioner` looks like:
|
||||
|
||||
```bash
|
||||
GET fhir/Measure/<MeasureId>/$evaluate-measure?practitioner=Practitioner/XYZ
|
||||
```
|
||||
|
||||
#### ReportType, Subject, Practitioner Matrix
|
||||
|
||||
The following table shows the combinations of the `subject` (or `patient`), `practitioner` and `reportType` parameters that are valid
|
||||
|
||||
| | subject reportType | subject-list reportType | population reportType |
|
||||
|------------------------|:------------------:|:---------------------------------:|:---------------------------------:|
|
||||
| subject parameter | :white_check_mark: | :white_check_mark: <sup>1,2</sup> | :white_check_mark: <sup>1,2</sup> |
|
||||
| practitioner parameter | :x:<sup>3</sup> | :white_check_mark: | :white_check_mark: |
|
||||
|
||||
1. Including the subject parameter restricts the Measure evaluation to a single Patient. Omit the `subject` (or `patient`) parameter to get a report for multiple Patients. The subject-list and population report types have less detail than a subject report.
|
||||
2. A Group `subject` with a subject-list or population `reportType` will be a valid combination once Group support is implemented.
|
||||
3. A practitioner may have zero, one, or many patients, so a practitioner report always assumes a set.
|
||||
|
||||
#### Scoring Methods
|
||||
|
||||
The Measure scoring method determines how a Measure score is calculated. It is set with the [scoring](https://www.hl7.org/fhir/measure-definitions.html#Measure.scoring) element on the Measure resource.
|
||||
|
||||
The HAPI implementation conforms to the requirements defined by the CQF Measures IG. A more detailed description of each scoring method is linked in the table below.
|
||||
|
||||
| Scoring Method | Supported | Description |
|
||||
|---------------------|:--------------------:|------------------------------------------------------------------------------------------------------------------------|
|
||||
| proportion | :white_check_mark: | [Proportion Measures](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#proportion-measures) |
|
||||
| ratio | :white_check_mark: | [Ratio Measures](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#ratio-measures) |
|
||||
| continuous-variable | :white_check_mark: | [Continuous Variable](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#continuous-variable-measure) |
|
||||
| cohort | :white_check_mark:* | [Cohort](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#cohort-definitions) |
|
||||
| composite | :white_large_square: | See below |
|
||||
|
||||
* The cohort Measure scoring support is partial. The HAPI implementation does not yet return the required Measure observations.
|
||||
|
||||
An example Measure resource with `scoring` defined looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"scoring": {
|
||||
"coding": [ {
|
||||
"system": "http://terminology.hl7.org/CodeSystem/measure-scoring",
|
||||
"code": "proportion",
|
||||
"display": "Proportion"
|
||||
} ]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
##### Composite Scoring
|
||||
|
||||
A composite Measure is scored by combining and/or aggregating the results of other Measures. The [compositeScoring](https://www.hl7.org/fhir/measure-definitions.html#Measure.compositeScoring) element is used to control how composite Measures are scored. HAPI does not currently support any composite scoring method.
|
||||
|
||||
| Composite Scoring Method | Supported | Description |
|
||||
|--------------------------|:--------------------:|------------------------------------------------------------------------------------------------|
|
||||
| opportunity | :white_large_square: | Combines Numerators and Denominators for each component Measure |
|
||||
| all-or-nothing | :white_large_square: | Includes individuals that are in the numerator for all component Measures |
|
||||
| linear | :white_large_square: | Gives an individual score based on the number of numerators in which they appear |
|
||||
| weighted | :white_large_square: | Gives an individual a score based on a weighted factor for each numerator in which they appear |
|
||||
|
||||
#### Populations
|
||||
|
||||
The HAPI implementation uses the populations defined by the CQF Measures IG for each scoring type. A matrix of the supported populations is shown in the [Criteria Names](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#criteria-names) section of the CQF Measures IG.
|
||||
|
||||
#### Population Criteria
|
||||
|
||||
The logical criteria used for determining each Measure population is defined by the [Measure.group.population.criteria](https://hl7.org/fhir/R4/measure-definitions.html#Measure.group.population.criteria) element. The Measure specification allows population criteria to be defined using FHIR Path, CQL, or other languages as appropriate. The HAPI implementation currently only supports using CQL. The relationship between a Measure Population and CQL is illustrated in the [Population Criteria](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#population-criteria) section of the CQF Measures IG.
|
||||
|
||||
An example Measure resource with a population criteria referencing a CQL identifier looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"group": [ {
|
||||
"population": [ {
|
||||
"code": {
|
||||
"coding": [ {
|
||||
"system": "http://terminology.hl7.org/CodeSystem/measure-population",
|
||||
"code": "initial-population",
|
||||
"display": "Initial Population"
|
||||
} ]
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql-identifier",
|
||||
"expression": "Initial Population"
|
||||
}
|
||||
}]
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
##### Criteria Expression Type
|
||||
|
||||
| Expression Type | Supported |
|
||||
|-----------------|:--------------------:|
|
||||
| CQL | :white_check_mark: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
#### Supplemental Data Elements
|
||||
|
||||
Supplemental Data Elements are used to report additional information about the subjects that may not be included in the Population criteria definitions. For example, it may be of interest to report the gender of all subjects for informational purposes. Supplemental data elements are defined by the [Measure.supplementalData](http://www.hl7.org/fhir/measure-definitions.html#Measure.supplementalData) element, and are reported as Observations in the evaluatedResources of the MeasureReport.
|
||||
|
||||
Supplemental Data Elements can be specified as either CQL definitions or FHIR Path expressions.
|
||||
|
||||
| Expression Type | Supported |
|
||||
|-----------------|:--------------------:|
|
||||
| CQL | :white_check_mark: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
An example Measure resource with some supplemental data elements set looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"supplementalData": [ {
|
||||
"code": {
|
||||
"text": "sde-ethnicity"
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql-identifier",
|
||||
"expression": "SDE Ethnicity"
|
||||
}
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
#### Stratifiers
|
||||
|
||||
Stratifiers are used to divide Measure populations into segments of interest. For example, it may be of interest to compare the Measure score between different age groups or genders. Each stratum within a stratification is scored the same way as the overall population. Stratifiers are defined using the [Measure.group.stratifier](http://hl7.org/fhir/R4/measure-definitions.html#Measure.group.stratifier) element.
|
||||
|
||||
An example Measure resource with a stratifier set looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"resourceType": "Measure",
|
||||
"group": [ {
|
||||
"stratifier": [ {
|
||||
"code": {
|
||||
"text": "Stratum 1"
|
||||
},
|
||||
"criteria": {
|
||||
"language": "text/cql-identifier",
|
||||
"expression": "Stratification 1"
|
||||
}
|
||||
}]
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
##### Stratifier Expression Support
|
||||
|
||||
As with Populations and Supplemental Data Elements, the criteria used for Stratification may be defined with CQL or FHIR Path.
|
||||
|
||||
| Expression Type | Supported |
|
||||
|-----------------|:--------------------:|
|
||||
| CQL | :white_check_mark: |
|
||||
| FHIR Path | :white_large_square: |
|
||||
|
||||
##### Stratifier Component Support
|
||||
|
||||
The Measure specification also supports multidimensional stratification, for cases where more than one data element is needed.
|
||||
|
||||
| Stratifier Type | Supported |
|
||||
|------------------|:--------------------:|
|
||||
| Single Component | :white_check_mark: |
|
||||
| Multi Component | :white_large_square: |
|
||||
|
||||
#### Evaluated Resources
|
||||
|
||||
A FHIR MeasureReport permits referencing the Resources used during evaluation via the [MeasureReport.evaluatedResource](https://www.hl7.org/fhir/measurereport-definitions.html#MeasureReport.evaluatedResource) element. HAPI includes these resources when generating `subject` reports for a single Patient. Evaluated resources for `population` or `subject-list` reports are not included. For large populations this could quickly become an extremely large number of resources.
|
||||
|
||||
The evaluated resources will not include every resource on the HAPI server for a given subject. Rather, they include only the resources that were retrieved from the server by the CQL logic that was evaluated. This corresponds to the data-requirements for a given Measure. As an example, consider the following CQL:
|
||||
|
||||
```cql
|
||||
valueset "Example Value Set" : 'http://fhir.org/example-value-set'
|
||||
|
||||
define "Example Observations":
|
||||
[Observation : "Example Value Set"]
|
||||
```
|
||||
|
||||
That CQL will only select Observation Resources that have a code in the "Example Value Set". Those Observations will be reported in the Evaluated Resources while any others will not.
|
||||
|
||||
#### Last Received On
|
||||
|
||||
The `lastReceivedOn` parameter is the date the Measure was evaluated and reported. It is used to limit the number of resources reported in the Measure report for individual reports. It is currently not supported by HAPI.
|
||||
|
||||
#### Extensions
|
||||
|
||||
A number of extensions to Measure evaluation defined by various IGs are supported. They are described briefly in the table below.
|
||||
|
||||
| Extension | Description |
|
||||
|---------------------------------------------------------------------------------------|--------------------------------------------------------------------------------|
|
||||
| http://hl7.org/fhir/us/cqframework/cqfmeasures/StructureDefinition/cqfm-productLine | Used to evaluate different product lines (e.g. Medicare, Private, etc.) |
|
||||
| http://hl7.org/fhir/StructureDefinition/cqf-measureInfo | Used to denote a Measure Observation |
|
||||
| http://hl7.org/fhir/us/davinci-deqm/StructureDefinition/extension-populationReference | Used to specify the population that triggered a particular `evaluatedResource` |
|
||||
|
||||
There's not currently a way to configure which extensions are enabled. All supported extensions are always enabled.
|
||||
|
||||
## FAQs
|
||||
|
||||
Q: I get an error saying HAPI can't locate my library, and I've verified it's on the server.
|
||||
|
||||
A: HAPI follows the [Library conformance requirements](https://build.fhir.org/ig/HL7/cqf-measures/measure-conformance.html#conformance-requirement-3-1) defined by the CQF Measures IG, meaning the Library must have a `logic-library` type, the name and versions of the FHIR Library and CQL Library must match, and the url of the Library must end in the name of the Library.
|
||||
|
||||
FHIR Libraries generated from CQL via the IG Publisher follow these requirements automatically.
|
||||
|
||||
Q: Does HAPI support partitions for evaluation?
|
||||
|
||||
A: Yes, though the Measure and associated Resources must be in the same partition as the clinical data being used.
|
||||
|
||||
## Roadmap
|
||||
|
||||
* Complete cohort implementation
|
||||
* Support for component stratifiers
|
||||
* Support for FHIRPath expressions in Stratifiers, Supplemental Data Elements, and Population Criteria
|
||||
* `$data-requirements`, `$collect-data`, `$submit-data`, and `$care-gaps` operations
|
||||
* Support for more extensions defined in the CQF Measures, CPG, and ATR IGs
|
@ -0,0 +1,28 @@
|
||||
# Clinical Reasoning
|
||||
|
||||
## Overview
|
||||
|
||||
Clinical Reasoning (CR) is the ability to represent, encode, and evaluate clinical knowledge so that it can be integrated into clinical systems. In other words, clinical reasoning is the ability to store and run business logic that is relevant to clinical settings. This may be as simple as controlling whether a particular section of an order set appears based on the conditions that a patient has, or it may be as complex as representing the care pathway for a patient with multiple conditions.
|
||||
|
||||
The FHIR [Clinical Reasoning module](http://www.hl7.org/fhir/clinicalreasoning-module.html) specifies a foundational set of FHIR resources and associated operations that allow a FHIR repository to perform clinical reasoning on clinical data. Some use cases include:
|
||||
|
||||
* Prospective/Retrospective Analytics
|
||||
* Quality Measures
|
||||
* Gaps in Care
|
||||
* Clinical Decision Support
|
||||
* Payer/Provider Data Exchange
|
||||
* Prior Authorization
|
||||
|
||||
There are additional IGs outside the FHIR CR module that define further requirements and behavior for other Clinical Reasoning use cases. Some examples include:
|
||||
|
||||
* [Structured Data Capture IG](https://build.fhir.org/ig/HL7/sdc/)
|
||||
* [Clinical Guidelines IG](https://hl7.org/fhir/uv/cpg/)
|
||||
* [Quality Measures IG](http://hl7.org/fhir/us/cqfmeasures/)
|
||||
|
||||
## HAPI FHIR
|
||||
|
||||
The HAPI FHIR server includes support for storing all the Clinical Reasoning resources defined in the FHIR CR module, including `Measure`, `PlanDefinition`, `ActivityDefinition` and so on. Additionally, HAPI includes an embedded [CQL](/hapi-fhir/docs/clinical_reasoning/cql.html) engine that allows it to process clinical logic encoded in a standard representation.
|
||||
|
||||
HAPI also includes a [Quality Measure](/hapi-fhir/docs/clinical_reasoning/measures.html) engine that can evaluate clinical quality measures.
|
||||
|
||||
See the [CQL](/hapi-fhir/docs/clinical_reasoning/cql.html) and [Measure](/hapi-fhir/docs/clinical_reasoning/measures.html) documentation for further details.
|
@ -85,6 +85,11 @@ page.server_jpa_partitioning.enabling_in_hapi_fhir=Enabling Partitioning in HAPI
|
||||
section.server_jpa_batch.title=JPA Server: Batch Processing
|
||||
page.server_jpa_batch.introduction=Batch Introduction
|
||||
|
||||
section.clinical_reasoning.title=Clinical Reasoning
|
||||
page.clinical_reasoning.overview=Clinical Reasoning Overview
|
||||
page.clinical_reasoning.cql=CQL
|
||||
page.clinical_reasoning.measures=Measures
|
||||
|
||||
section.interceptors.title=Interceptors
|
||||
page.interceptors.interceptors=Interceptors Overview
|
||||
page.interceptors.client_interceptors=Client Interceptors
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
HAPI provides several ways to add [Narrative Text](http://hl7.org/fhir/narrative.html) to your encoded messages.
|
||||
|
||||
The simplest way is to simply place the narrative text directly in the resource via the `setDivAsString()` method.
|
||||
The simplest way is to place the narrative text directly in the resource via the `setDivAsString()` method.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/Narrative.java|simple}}
|
||||
@ -18,7 +18,7 @@ HAPI's built-in narrative generation uses the [Thymeleaf](http://www.thymeleaf.o
|
||||
|
||||
## A Simple Example
|
||||
|
||||
Activating HAPI's built-in narrative generator is as simple as calling [setNarrativeGenerator](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/FhirContext.html#setNarrativeGenerator(ca.uhn.fhir.narrative.INarrativeGenerator).
|
||||
Activating HAPI's built-in narrative generator is as simple as calling [setNarrativeGenerator](/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/FhirContext.html#setNarrativeGenerator(ca.uhn.fhir.narrative.INarrativeGenerator)).
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/Narrative.java|example1}}
|
||||
@ -54,8 +54,8 @@ Note that these templates expect a few specific CSS definitions to be present in
|
||||
|
||||
To use your own templates for narrative generation, simply create one or more templates, using the Thymeleaf HTML based syntax.
|
||||
|
||||
```java
|
||||
{{snippet:classpath:/ca/uhn/fhir/narrative/OperationOutcome.html}}
|
||||
```html
|
||||
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/snippet/OperationOutcome.html}}
|
||||
```
|
||||
|
||||
Then create a properties file which describes your templates. In this properties file, each resource to be defined has a pair of properties.
|
||||
@ -79,13 +79,16 @@ vitalsigns.profile=http://hl7.org/fhir/StructureDefinition/vitalsigns
|
||||
vitalsigns.narrative=classpath:com/example/narrative/Observation_Vitals.html
|
||||
```
|
||||
|
||||
You may also override/define behaviour for datatypes and other structures. These datatype narrative definitions will be used as content within <code>th:narrative</code> blocks in resource templates. See the example resource template above for an example.
|
||||
You may also override/define behaviour for datatypes and other structures. These datatype narrative definitions will be used as content within <code>th:narrative</code> blocks in resource templates. See the [example above](#creating-your-own-templates).
|
||||
|
||||
```properties
|
||||
# You can create a template based on a type name
|
||||
quantity.dataType=Quantity
|
||||
quantity.narrative=classpath:com/example/narrative/Quantity.html
|
||||
|
||||
string.dataType=String
|
||||
string.narrative=classpath:com/example/narrative/String.html
|
||||
|
||||
# Or by class name, which can be useful for custom datatypes and structures
|
||||
custom_extension.class=com.example.model.MyCustomExtension
|
||||
custom_extension.narrative=classpath:com/example/narrative/CustomExtension.html
|
||||
@ -105,13 +108,13 @@ Thymeleaf has a concept called Fragments, which allow reusable template portions
|
||||
{{snippet:classpath:ca/uhn/fhir/narrative/narrative-with-fragment.properties}}
|
||||
```
|
||||
|
||||
The following template declares a fragment (this is `narrative-with-fragment-child.html` in the example above):
|
||||
The following template declares `Fragment1` and `Fragment2` as part of file `narrative-with-fragment-child.html`:
|
||||
|
||||
```html
|
||||
{{snippet:classpath:ca/uhn/fhir/narrative/narrative-with-fragment-child.html}}
|
||||
```
|
||||
|
||||
And the following template uses it (this is `narrative-with-fragment-child.html` in the example above):
|
||||
And the following parent template (`narrative-with-fragment-parent.html`) imports `Fragment1` with parameter 'blah':
|
||||
|
||||
```html
|
||||
{{snippet:classpath:ca/uhn/fhir/narrative/narrative-with-fragment-parent.html}}
|
||||
|
@ -20,12 +20,12 @@ Clients may sometimes post resources to your server that contain absolute resour
|
||||
|
||||
By default, the server will reject this reference, as only local references are permitted by the server. This can be changed however.
|
||||
|
||||
If you want the server to recognize that this URL is actually a local reference (i.e. because the server will be deployed to the base URL `http://example.com/fhir/`) you can configure the server to recognize this URL via the following DaoConfig setting:
|
||||
If you want the server to recognize that this URL is actually a local reference (i.e. because the server will be deployed to the base URL `http://example.com/fhir/`) you can configure the server to recognize this URL via the following JpaStorageSettings setting:
|
||||
|
||||
```java
|
||||
@Bean
|
||||
public DaoConfig daoConfig() {
|
||||
DaoConfig retVal = new DaoConfig();
|
||||
public JpaStorageSettings storageSettings() {
|
||||
JpaStorageSettings retVal = new JpaStorageSettings();
|
||||
// ... other config ...
|
||||
retVal.getTreatBaseUrlsAsLocal().add("http://example.com/fhir/");
|
||||
return retVal;
|
||||
@ -36,8 +36,8 @@ On the other hand, if you want the server to be configurable to allow remote ref
|
||||
|
||||
```java
|
||||
@Bean
|
||||
public DaoConfig daoConfig() {
|
||||
DaoConfig retVal = new DaoConfig();
|
||||
public JpaStorageSettings storageSettings() {
|
||||
JpaStorageSettings retVal = new JpaStorageSettings();
|
||||
// Allow external references
|
||||
retVal.setAllowExternalReferences(true);
|
||||
|
||||
@ -59,19 +59,19 @@ etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/
|
||||
resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve,
|
||||
but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
|
||||
|
||||
HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
|
||||
HAPI can be configured to treat certain URI/URL patterns as logical by using the JpaStorageSettings#setTreatReferencesAsLogical
|
||||
property (
|
||||
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
|
||||
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setTreatReferencesAsLogical(java.util.Set)))
|
||||
.
|
||||
|
||||
For example:
|
||||
|
||||
```java
|
||||
// Treat specific URL as logical
|
||||
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
|
||||
myStorageSettings.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
|
||||
|
||||
// Treat all references with given prefix as logical
|
||||
myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
|
||||
myStorageSettings.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
|
||||
```
|
||||
|
||||
## Referential Integrity
|
||||
@ -88,8 +88,8 @@ Referential integrity can be configured on two levels: `write` and `delete`.
|
||||
#### JPA Server
|
||||
```java
|
||||
@Bean
|
||||
public DaoConfig daoConfig() {
|
||||
DaoConfig retVal = new DaoConfig();
|
||||
public JpaStorageSettings storageSettings() {
|
||||
JpaStorageSettings retVal = new JpaStorageSettings();
|
||||
// ... other config ...
|
||||
retVal.setEnforceReferentialIntegrityOnWrite(true);
|
||||
retVal.setEnforceReferentialIntegrityOnDelete(true);
|
||||
@ -116,7 +116,7 @@ Under many normal scenarios this is a n acceptable performance tradeoff, but in
|
||||
You can change the global cache using the following setting:
|
||||
|
||||
```java
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
|
||||
myStorageSettings.setReuseCachedSearchResultsForMillis(null);
|
||||
```
|
||||
|
||||
### Disable Cache at the Request Level
|
||||
@ -168,5 +168,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
|
||||
|
||||
Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
|
||||
?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
|
||||
of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
|
||||
of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#getExpungeBatchSize())
|
||||
property.
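
For example, a hedged sketch of tuning that batch size (assuming a setter that mirrors the getter linked above):

```java
// Assumption: the setter mirrors JpaStorageSettings#getExpungeBatchSize()
myStorageSettings.setExpungeBatchSize(1000);
```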
|
||||
|
@ -7,7 +7,7 @@ This is required to support the `_content`, or `_text` search parameters.
|
||||
|
||||
Additional indexing is implemented for simple search parameters of type token, string, and reference.
|
||||
These implement the basic search, as well as several modifiers:
|
||||
This **experimental** feature is enabled via the `setAdvancedHSearchIndexing()` property of DaoConfig.
|
||||
This **experimental** feature is enabled via the `setAdvancedHSearchIndexing()` property of JpaStorageSettings.
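
A minimal sketch of turning this on, using the same storage settings bean style shown elsewhere in the documentation (the flag value here is for illustration only):

```java
@Bean
public JpaStorageSettings storageSettings() {
	JpaStorageSettings retVal = new JpaStorageSettings();
	// Experimental: index token, string, and reference parameters in Hibernate Search
	retVal.setAdvancedHSearchIndexing(true);
	return retVal;
}
```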
|
||||
|
||||
## Search Parameter Support
|
||||
|
||||
@ -103,7 +103,7 @@ search index. This allows some queries to return results without using the rela
|
||||
Note: This does not support the $meta-add or $meta-delete operations. Full reindexing is required
|
||||
when this option is enabled after resources have been indexed.
|
||||
|
||||
This **experimental** feature is enabled via the `setStoreResourceInHSearchIndex()` option of DaoConfig.
|
||||
This **experimental** feature is enabled via the `setStoreResourceInHSearchIndex()` option of JpaStorageSettings.
|
||||
|
||||
# Synchronous Writes
|
||||
|
||||
|
@ -24,9 +24,9 @@ The grouping of Observation resources by `Observation.code` means that the `$las
|
||||
|
||||
# Deployment and Configuration
|
||||
|
||||
The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
|
||||
The `$lastn` operation is disabled by default. The operation can be enabled by setting the JpaStorageSettings#setLastNEnabled
|
||||
property (
|
||||
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
|
||||
see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setLastNEnabled(boolean)))
|
||||
.
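
A hedged sketch of enabling it in a storage settings bean (the setter name comes from the JavaDoc linked above):

```java
@Bean
public JpaStorageSettings storageSettings() {
	JpaStorageSettings retVal = new JpaStorageSettings();
	// Enable the $lastn operation; an Elasticsearch-backed index is also required (see below)
	retVal.setLastNEnabled(true);
	return retVal;
}
```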

In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters

@ -6,7 +6,7 @@ This page contains information for performance optimization. If you are planning

The FHIR history operation allows clients to see a change history for a resource, across all resources of a given type, or even across all resources on a server. This operation includes a total count (in `Bundle.total`) that can be very expensive to calculate on large databases with many resources.

As a result, a setting on the `DaoConfig` object has been added called **History Count Mode**. This setting has 3 possible options:
As a result, a setting on the `JpaStorageSettings` object has been added called **History Count Mode**. This setting has 3 possible options:

* COUNT_CACHED. This is the new default: A loading cache will be used for history counts without any dates specified, meaning that counts are stored in RAM for up to one minute, and the loading cache blocks all but one client thread per JVM from actually performing the count. This effectively throttles access to the database. History operation invocations that include a `_since` or `_to` parameter will never have a count included in the results.

@ -255,11 +255,11 @@ an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE**
visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.

If the server has been configured with
a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the
a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.IdStrategyEnum))
of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.IdStrategyEnum.html#UUID), or the
server has been configured with
a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.ClientIdStrategyEnum))
of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.ClientIdStrategyEnum.html#ANY)
the server will create a Forced ID for all resources (not only resources having textual IDs).

## Columns

@ -41,7 +41,7 @@ As a result, in HAPI FHIR JPA 3.6.0, an efficient way of upgrading existing data
In order to perform a migration using this functionality, the following steps should be followed:

* Stop your running HAPI FHIR JPA instance (and remember to make a backup of your database before proceeding with any changes!)
* Modify your `DaoConfig` to specify that hash-based searches should not be used, using the following setting: `myDaoConfig.setDisableHashBasedSearches(true);`
* Modify your `JpaStorageSettings` to specify that hash-based searches should not be used, using the following setting: `myStorageSettings.setDisableHashBasedSearches(true);`
* Make sure that you have your JPA settings configured to not automatically create database indexes and columns using the following setting in your JPA Properties: `extraProperties.put("hibernate.hbm2ddl.auto", "none");`
* Run the database migrator command, including the entry `-x no-migrate-350-hashes` on the command line. For example:

@ -60,7 +60,7 @@ SELECT * FROM HFJ_RES_REINDEX_JOB

* When this query no longer returns any rows, the reindexing process is complete.
* At this point, HAPI FHIR should be stopped once again so that it can be switched over to the hash-based indexes.
* Modify your `DaoConfig` to specify that hash-based searches are used, using the following setting (this is the default setting, so it could also simply be omitted): `myDaoConfig.setDisableHashBasedSearches(false);`
* Modify your `JpaStorageSettings` to specify that hash-based searches are used, using the following setting (this is the default setting, so it could also simply be omitted): `myStorageSettings.setDisableHashBasedSearches(false);`
* Execute the migrator tool again, this time omitting the flag option, e.g.

```bash

@ -33,8 +33,8 @@ One important caveat is that chaining is currently not supported when using this
## Enabling MDM Expansion

On top of needing to instantiate an MDM module, you must enable this feature in
the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
the [StorageSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html) bean, using
the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setAllowMdmExpansion(boolean))
property.
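
A minimal sketch of enabling it, assuming the storage settings bean is exposed as `myStorageSettings`:

```java
// Allow MDM expansion of reference search parameters to all MDM-linked resources
myStorageSettings.setAllowMdmExpansion(true);
```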

<div class="helpWarningCalloutBox">

@ -708,7 +708,7 @@ This operation takes two optional Parameters.
<td>0..1</td>
<td>
The number of links that should be deleted at a time. If omitted, then the batch size will be determined by the value
of [Reindex Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getReindexBatchSize())
of [Reindex Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/StorageConfig.html#getReindexBatchSize())
property.
</td>
</tr>

@ -57,7 +57,7 @@ This fact can have security implications:
in use in another partition.

* In a server using the default configuration of
SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.IdStrategyEnum))
a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.

These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
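
A minimal sketch of that configuration, assuming a `JpaStorageSettings` bean named `myStorageSettings`; the `NOT_ALLOWED` value is an assumption about the client ID strategy enum:

```java
// Use opaque server-assigned UUIDs and reject client-assigned resource IDs
myStorageSettings.setResourceServerIdStrategy(JpaStorageSettings.IdStrategyEnum.UUID);
myStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED);
```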

@ -137,7 +137,7 @@ If you wish to update a historical version of a resource without creating a new
Update operation. While this operation is not supported by the FHIR specification, it's an enhancement added
specifically to HAPI-FHIR.

In order to use this new functionality, you must set the `setUpdateWithHistoryRewriteEnabled` setting in the `DaoConfig`
In order to use this new functionality, you must set the `setUpdateWithHistoryRewriteEnabled` setting in the `StorageSettings`
to true.
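
A minimal sketch of enabling it, assuming the storage settings bean is named `myStorageSettings`:

```java
// Allow PUT requests that rewrite a specific historical version in place
myStorageSettings.setUpdateWithHistoryRewriteEnabled(true);
```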

The API request below shows an example of executing a PUT at the following endpoint.

@ -0,0 +1,23 @@

<html>
<head>
<link rel="stylesheet" type="text/css" href="narrative.css"/>
</head>
<body>
<!--*/-->
<div>
<h1>Operation Outcome</h1>
<table border="0">
<tr th:each="issue : ${resource.issue}">
<td th:text="${issue.severityElement.value}" style="font-weight: bold;"></td>
<td th:text="${issue.location}"></td>
<td th:narrative="${issue.diagnostics}"></td>
</tr>
</table>
</div>

<!--*/-->
</body>
</html>

@ -101,6 +101,22 @@ The `migrate-database` command may be used to Migrate a database schema when upg

See [Upgrading HAPI FHIR JPA](/docs/server_jpa/upgrading.html) for information on how to use this command.

# Clear Migration lock

The `clear-migration-lock` command should be used if an upgrade to HAPI-FHIR failed during a migration. The migration system creates a lock row when it begins. If the migration is cancelled before it finishes, the system will be left in an inconsistent state. In order to resume the migration, the lock row must be removed. From your migration logs, you will see a line which looks like the following:

```text
Migration Lock Row added. [uuid=05931c87-c2a4-49d6-8d82-d8ce09fdd8ef]
```

In order to clear this migration lock, you can run:

```bash
clear-migration-lock --lock-uuid 05931c87-c2a4-49d6-8d82-d8ce09fdd8ef
```


# Reindex Terminology

The `reindex-terminology` command may be used to recreate freetext indexes for terminology resources.

@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -12,6 +12,7 @@ import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.net.ssl.SSLException;
|
||||
import javax.net.ssl.SSLHandshakeException;
|
||||
import javax.ws.rs.client.Client;
|
||||
import javax.ws.rs.core.Response;
|
||||
@ -23,6 +24,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.core.IsNot.not;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
/**
|
||||
@ -99,7 +101,7 @@ public class JaxRsRestfulClientFactoryTest extends BaseFhirVersionParameterizedT
|
||||
.get(Response.class);
|
||||
fail();
|
||||
} catch (Exception e) {
|
||||
assertEquals(SSLHandshakeException.class, e.getCause().getClass());
|
||||
assertTrue(e.getCause() instanceof SSLException);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,25 +1,26 @@
|
||||
package ca.uhn.fhir.jaxrs.server.util;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.hl7.fhir.dstu3.model.*;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.api.SummaryEnum;
|
||||
import ca.uhn.fhir.rest.server.RestfulServerUtils;
|
||||
import org.hl7.fhir.dstu3.model.Binary;
|
||||
import org.hl7.fhir.dstu3.model.Bundle;
|
||||
import org.hl7.fhir.dstu3.model.IdType;
|
||||
import org.hl7.fhir.dstu3.model.Patient;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import javax.ws.rs.core.Response;
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class JaxRsResponseDstu3Test {
|
||||
|
||||
|
@ -1,28 +1,26 @@
|
||||
package ca.uhn.fhir.jaxrs.server.util;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import ca.uhn.fhir.model.dstu2.resource.Binary;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Patient;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.SummaryEnum;
|
||||
import ca.uhn.fhir.rest.server.RestfulServerUtils;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import javax.ws.rs.core.Response;
|
||||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class JaxRsResponseTest {
|
||||
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>6.5.0-SNAPSHOT</version>
|
||||
<version>6.5.2-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -29,13 +29,13 @@ import ca.uhn.fhir.batch2.model.MarkWorkChunkAsErrorRequest;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.batch2.model.WorkChunk;
|
||||
import ca.uhn.fhir.batch2.models.JobInstanceFetchRequest;
|
||||
import ca.uhn.fhir.util.Logs;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
|
||||
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
|
||||
import ca.uhn.fhir.jpa.util.JobInstanceUtil;
|
||||
import ca.uhn.fhir.model.api.PagingIterator;
|
||||
import ca.uhn.fhir.util.Logs;
|
||||
import org.apache.commons.collections4.ListUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -21,9 +21,8 @@ package ca.uhn.fhir.jpa.binstore;
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.binary.api.StoredDetails;
|
||||
import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao;
|
||||
import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
|
||||
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
|
||||
@ -35,14 +34,12 @@ import org.hibernate.LobHelper;
|
||||
import org.hibernate.Session;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
import javax.persistence.PersistenceContextType;
|
||||
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
@ -58,10 +55,6 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
|
||||
private EntityManager myEntityManager;
|
||||
@Autowired
|
||||
private IBinaryStorageEntityDao myBinaryStorageEntityDao;
|
||||
@Autowired
|
||||
private PlatformTransactionManager myPlatformTransactionManager;
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.REQUIRED)
|
||||
|
@ -20,38 +20,42 @@ package ca.uhn.fhir.jpa.bulk.export.svc;
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.model.JobInstance;
|
||||
import ca.uhn.fhir.batch2.model.StatusEnum;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
|
||||
import ca.uhn.fhir.jpa.model.sched.HapiJob;
|
||||
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
|
||||
import ca.uhn.fhir.util.JsonUtil;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBinary;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Binary;
|
||||
import org.quartz.JobExecutionContext;
|
||||
import org.slf4j.Logger;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.annotation.Propagation;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.slf4j.LoggerFactory.getLogger;
|
||||
@ -59,26 +63,22 @@ import static org.slf4j.LoggerFactory.getLogger;
|
||||
public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJobSchedulingHelper, IHasScheduledJobs {
|
||||
private static final Logger ourLog = getLogger(BulkDataExportJobSchedulingHelperImpl.class);
|
||||
|
||||
@Autowired
|
||||
private DaoRegistry myDaoRegistry;
|
||||
private final DaoRegistry myDaoRegistry;
|
||||
|
||||
@Autowired
|
||||
private IBulkExportCollectionDao myBulkExportCollectionDao;
|
||||
|
||||
@Autowired
|
||||
private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;
|
||||
|
||||
@Autowired
|
||||
private PlatformTransactionManager myTxManager;
|
||||
private final PlatformTransactionManager myTxManager;
|
||||
private final JpaStorageSettings myDaoConfig;
|
||||
private final BulkExportHelperService myBulkExportHelperSvc;
|
||||
private final IJobPersistence myJpaJobPersistence;
|
||||
private TransactionTemplate myTxTemplate;
|
||||
|
||||
@Autowired
|
||||
private IBulkExportJobDao myBulkExportJobDao;
|
||||
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
@Autowired
|
||||
private BulkExportHelperService myBulkExportHelperSvc;
|
||||
public BulkDataExportJobSchedulingHelperImpl(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence, TransactionTemplate theTxTemplate) {
|
||||
myDaoRegistry = theDaoRegistry;
|
||||
myTxManager = theTxManager;
|
||||
myDaoConfig = theDaoConfig;
|
||||
myBulkExportHelperSvc = theBulkExportHelperSvc;
|
||||
myJpaJobPersistence = theJpaJobPersistence;
|
||||
myTxTemplate = theTxTemplate;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void start() {
|
||||
@ -97,15 +97,10 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
public synchronized void cancelAndPurgeAllJobs() {
|
||||
myTxTemplate.execute(t -> {
|
||||
ourLog.info("Deleting all files");
|
||||
myBulkExportCollectionFileDao.deleteAllFiles();
|
||||
ourLog.info("Deleting all collections");
|
||||
myBulkExportCollectionDao.deleteAllFiles();
|
||||
ourLog.info("Deleting all jobs");
|
||||
myBulkExportJobDao.deleteAllFiles();
|
||||
return null;
|
||||
});
|
||||
// This is called by unit test code that also calls ExpungeEverythingService,
|
||||
// which explicitly deletes both Batch2WorkChunkEntity and Batch2JobInstanceEntity, as well as ResourceTable, in
|
||||
// which Binary's are stored
|
||||
// Long story short, this method no longer needs to do anything
|
||||
}
|
||||
|
||||
/**
|
||||
@ -116,51 +111,111 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
|
||||
@Override
|
||||
public void purgeExpiredFiles() {
|
||||
if (!myDaoConfig.isEnableTaskBulkExportJobExecution()) {
|
||||
ourLog.debug("bulk export disabled: doing nothing");
|
||||
return;
|
||||
}
|
||||
|
||||
Optional<BulkExportJobEntity> jobToDelete = myTxTemplate.execute(t -> {
|
||||
Pageable page = PageRequest.of(0, 1);
|
||||
Slice<BulkExportJobEntity> submittedJobs = myBulkExportJobDao.findNotRunningByExpiry(page, new Date());
|
||||
if (submittedJobs.isEmpty()) {
|
||||
return Optional.empty();
|
||||
}
|
||||
return Optional.of(submittedJobs.getContent().get(0));
|
||||
});
|
||||
final List<JobInstance> jobInstancesToDelete = myTxTemplate.execute(t ->
|
||||
myJpaJobPersistence.fetchInstances(Batch2JobDefinitionConstants.BULK_EXPORT,
|
||||
StatusEnum.getEndedStatuses(),
|
||||
computeCutoffFromConfig(),
|
||||
PageRequest.of(0, 50))
|
||||
);
|
||||
|
||||
if (jobToDelete.isPresent()) {
|
||||
ourLog.info("Deleting bulk export job: {}", jobToDelete.get());
|
||||
if (jobInstancesToDelete == null || jobInstancesToDelete.isEmpty()) {
|
||||
ourLog.debug("No batch 2 bulk export jobs found! Nothing to do!");
|
||||
ourLog.info("Finished bulk export job deletion with nothing to do");
|
||||
return;
|
||||
}
|
||||
|
||||
for (JobInstance jobInstance : jobInstancesToDelete) {
|
||||
ourLog.info("Deleting batch 2 bulk export job: {}", jobInstance);
|
||||
|
||||
myTxTemplate.execute(t -> {
|
||||
BulkExportJobEntity job = myBulkExportJobDao.getOne(jobToDelete.get().getId());
|
||||
for (BulkExportCollectionEntity nextCollection : job.getCollections()) {
|
||||
for (BulkExportCollectionFileEntity nextFile : nextCollection.getFiles()) {
|
||||
final Optional<JobInstance> optJobInstanceForInstanceId = myJpaJobPersistence.fetchInstance(jobInstance.getInstanceId());
|
||||
|
||||
ourLog.info("Purging bulk data file: {}", nextFile.getResourceId());
|
||||
IIdType id = myBulkExportHelperSvc.toId(nextFile.getResourceId());
|
||||
getBinaryDao().delete(id, new SystemRequestDetails());
|
||||
getBinaryDao().forceExpungeInExistingTransaction(id, new ExpungeOptions().setExpungeDeletedResources(true).setExpungeOldVersions(true), new SystemRequestDetails());
|
||||
myBulkExportCollectionFileDao.deleteByPid(nextFile.getId());
|
||||
|
||||
}
|
||||
|
||||
myBulkExportCollectionDao.deleteByPid(nextCollection.getId());
|
||||
if (optJobInstanceForInstanceId.isEmpty()) {
|
||||
ourLog.error("Can't find job instance for ID: {} despite having retrieved it in the first step", jobInstance.getInstanceId());
|
||||
return null;
|
||||
}
|
||||
|
||||
ourLog.debug("*** About to delete job with ID {}", job.getId());
|
||||
myBulkExportJobDao.deleteByPid(job.getId());
|
||||
final JobInstance jobInstanceForInstanceId = optJobInstanceForInstanceId.get();
|
||||
ourLog.info("Deleting bulk export job: {}", jobInstanceForInstanceId);
|
||||
|
||||
// We need to keep these for investigation but we also need a process to manually delete these jobs once we're done investigating
|
||||
if (StatusEnum.FAILED == jobInstanceForInstanceId.getStatus()) {
|
||||
ourLog.info("skipping because the status is FAILED for ID: {}" + jobInstanceForInstanceId.getInstanceId());
|
||||
return null;
|
||||
}
|
||||
|
||||
purgeBinariesIfNeeded(jobInstanceForInstanceId, jobInstanceForInstanceId.getReport());
|
||||
|
||||
final String batch2BulkExportJobInstanceId = jobInstanceForInstanceId.getInstanceId();
|
||||
ourLog.debug("*** About to delete batch 2 bulk export job with ID {}", batch2BulkExportJobInstanceId);
|
||||
|
||||
myJpaJobPersistence.deleteInstanceAndChunks(batch2BulkExportJobInstanceId);
|
||||
|
||||
ourLog.info("Finished deleting bulk export job: {}", jobInstance.getInstanceId());
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
ourLog.info("Finished deleting bulk export job: {}", jobToDelete.get());
|
||||
ourLog.info("Finished deleting bulk export jobs");
|
||||
}
|
||||
}
|
||||
|
||||
private void purgeBinariesIfNeeded(JobInstance theJobInstanceForInstanceId, String theJobInstanceReportString) {
|
||||
final Optional<BulkExportJobResults> optBulkExportJobResults = getBulkExportJobResults(theJobInstanceReportString);
|
||||
|
||||
if (optBulkExportJobResults.isPresent()) {
|
||||
final BulkExportJobResults bulkExportJobResults = optBulkExportJobResults.get();
|
||||
ourLog.debug("job: {} resource type to binary ID: {}", theJobInstanceForInstanceId.getInstanceId(), bulkExportJobResults.getResourceTypeToBinaryIds());
|
||||
|
||||
final Map<String, List<String>> resourceTypeToBinaryIds = bulkExportJobResults.getResourceTypeToBinaryIds();
|
||||
for (String resourceType : resourceTypeToBinaryIds.keySet()) {
|
||||
final List<String> binaryIds = resourceTypeToBinaryIds.get(resourceType);
|
||||
for (String binaryId : binaryIds) {
|
||||
ourLog.info("Purging batch 2 bulk export binary: {}", binaryId);
|
||||
IIdType id = myBulkExportHelperSvc.toId(binaryId);
|
||||
getBinaryDao().delete(id, new SystemRequestDetails());
|
||||
}
|
||||
}
|
||||
} // else we can't know what the binary IDs are, so delete this job and move on
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private IFhirResourceDao<IBaseBinary> getBinaryDao() {
|
||||
return myDaoRegistry.getResourceDao("Binary");
|
||||
return myDaoRegistry.getResourceDao(Binary.class.getSimpleName());
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private Optional<BulkExportJobResults> getBulkExportJobResults(String theJobInstanceReportString) {
|
||||
if (StringUtils.isBlank(theJobInstanceReportString)) {
|
||||
ourLog.error(String.format("Cannot parse job report string because it's null or blank: %s", theJobInstanceReportString));
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
try {
|
||||
return Optional.of(JsonUtil.deserialize(theJobInstanceReportString, BulkExportJobResults.class));
|
||||
} catch (Exception theException) {
|
||||
ourLog.error(String.format("Cannot parse job report string: %s", theJobInstanceReportString), theException);
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private Date computeCutoffFromConfig() {
|
||||
final int bulkExportFileRetentionPeriodHours = myDaoConfig.getBulkExportFileRetentionPeriodHours();
|
||||
|
||||
final LocalDateTime cutoffLocalDateTime = LocalDateTime.now()
|
||||
.minusHours(bulkExportFileRetentionPeriodHours);
|
||||
|
||||
return Date.from(cutoffLocalDateTime
|
||||
.atZone(ZoneId.systemDefault())
|
||||
.toInstant());
|
||||
}
|
||||
|
||||
|
||||
public static class PurgeExpiredFilesJob implements HapiJob {
|
||||
@Autowired
|
||||
private IBulkDataExportJobSchedulingHelper myTarget;
|
||||
|
@ -1,41 +0,0 @@
|
||||
package ca.uhn.fhir.jpa.bulk.export.svc;
|
||||
|
||||
/*-
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
|
||||
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@Service
|
||||
public class BulkExportCollectionFileDaoSvc {
|
||||
|
||||
@Autowired
|
||||
private IBulkExportCollectionFileDao myBulkExportCollectionFileDao;
|
||||
|
||||
@Transactional
|
||||
public void save(BulkExportCollectionFileEntity theBulkExportCollectionEntity) {
|
||||
myBulkExportCollectionFileDao.saveAndFlush(theBulkExportCollectionEntity);
|
||||
}
|
||||
|
||||
}
|
@ -39,7 +39,6 @@ import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
|
||||
import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.QueryChunker;
|
||||
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
|
||||
@ -47,6 +46,7 @@ import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
|
||||
import ca.uhn.fhir.mdm.model.MdmPidTuple;
|
||||
import ca.uhn.fhir.model.api.Include;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
|
||||
import ca.uhn.fhir.rest.param.HasOrListParam;
|
||||
import ca.uhn.fhir.rest.param.HasParam;
|
||||
|
@ -23,7 +23,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.svc;
|
||||
import ca.uhn.fhir.batch2.api.IJobCoordinator;
|
||||
import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
|
||||
import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.util.Logs;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
|
||||
@ -38,7 +39,6 @@ import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
|
||||
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
|
||||
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.util.Logs;
|
||||
import ca.uhn.fhir.util.ValidateUtil;
|
||||
import com.apicatalog.jsonld.StringUtils;
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
@ -79,7 +79,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
|
||||
private IJobCoordinator myJobCoordinator;
|
||||
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
private JpaStorageSettings myStorageSettings;
|
||||
|
||||
@PostConstruct
|
||||
public void start() {
|
||||
@ -163,7 +163,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
@Override
|
||||
public ActivateJobResult activateNextReadyJob() {
|
||||
if (!myDaoConfig.isEnableTaskBulkImportJobExecution()) {
|
||||
if (!myStorageSettings.isEnableTaskBulkImportJobExecution()) {
|
||||
Logs.getBatchTroubleshootingLog().trace("Bulk import job execution is not enabled on this server. No action taken.");
|
||||
return new ActivateJobResult(false, null);
|
||||
}
|
||||
@ -295,7 +295,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
|
||||
|
||||
ourLog.info("Submitting bulk import with bijob id {} to job scheduler", biJobId);
|
||||
|
||||
return myJobCoordinator.startInstance(request).getJobId();
|
||||
return myJobCoordinator.startInstance(request).getInstanceId();
|
||||
}
|
||||
|
||||
private void addFilesToJob(@Nonnull List<BulkImportJobFileJson> theInitialFiles, BulkImportJobEntity job, int nextSequence) {
|
||||
|
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.config;
|
||||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
|
||||
import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
@ -48,7 +48,7 @@ public class Batch2SupportConfig {
|
||||
}
|
||||
|
||||
@Bean
|
||||
DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, DaoConfig theDaoConfig, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
|
||||
return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theDaoConfig, theIdHelper, theResourceLinkDao);
|
||||
DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
|
||||
return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theStorageSettings, theIdHelper, theResourceLinkDao);
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
package ca.uhn.fhir.jpa.config;
|
||||
|
||||
import ca.uhn.fhir.batch2.api.IJobPersistence;
|
||||
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeJobSubmitterImpl;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
@ -7,7 +8,7 @@ import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorService;
|
||||
import ca.uhn.fhir.interceptor.executor.InterceptorService;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
@ -16,6 +17,7 @@ import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
|
||||
import ca.uhn.fhir.jpa.bulk.export.provider.BulkDataExportProvider;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportJobSchedulingHelperImpl;
|
||||
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportHelperService;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.imprt.svc.BulkDataImportSvcImpl;
|
||||
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
|
||||
@ -161,6 +163,7 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
|
||||
import org.springframework.scheduling.TaskScheduler;
|
||||
import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
|
||||
import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
@ -211,7 +214,7 @@ public class JpaConfig {
|
||||
private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
|
||||
|
||||
@Autowired
|
||||
public DaoConfig myDaoConfig;
|
||||
public JpaStorageSettings myStorageSettings;
|
||||
|
||||
@Bean("myDaoRegistry")
|
||||
public DaoRegistry daoRegistry() {
|
||||
@ -288,16 +291,16 @@ public class JpaConfig {
|
||||
|
||||
@Bean(name = "myBinaryStorageInterceptor")
|
||||
@Lazy
|
||||
public BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> binaryStorageInterceptor(DaoConfig theDaoConfig, FhirContext theCtx) {
|
||||
public BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> binaryStorageInterceptor(JpaStorageSettings theStorageSettings, FhirContext theCtx) {
|
||||
BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> interceptor = new BinaryStorageInterceptor<>(theCtx);
|
||||
interceptor.setAllowAutoInflateBinaries(theDaoConfig.isAllowAutoInflateBinaries());
|
||||
interceptor.setAutoInflateBinariesMaximumSize(theDaoConfig.getAutoInflateBinariesMaximumBytes());
|
||||
interceptor.setAllowAutoInflateBinaries(theStorageSettings.isAllowAutoInflateBinaries());
|
||||
interceptor.setAutoInflateBinariesMaximumSize(theStorageSettings.getAutoInflateBinariesMaximumBytes());
|
||||
return interceptor;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public MemoryCacheService memoryCacheService(DaoConfig theDaoConfig) {
|
||||
return new MemoryCacheService(theDaoConfig);
|
||||
public MemoryCacheService memoryCacheService(JpaStorageSettings theStorageSettings) {
|
||||
return new MemoryCacheService(theStorageSettings);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ -451,8 +454,8 @@ public class JpaConfig {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper() {
|
||||
return new BulkDataExportJobSchedulingHelperImpl();
|
||||
public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theStorageSettings, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence) {
|
||||
return new BulkDataExportJobSchedulingHelperImpl(theDaoRegistry, theTxManager, theStorageSettings, theBulkExportHelperSvc, theJpaJobPersistence, null);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ -659,7 +662,7 @@ public class JpaConfig {
|
||||
|
||||
@Bean
|
||||
public SearchStrategyFactory searchStrategyFactory(@Autowired(required = false) IFulltextSearchSvc theFulltextSvc) {
|
||||
return new SearchStrategyFactory(myDaoConfig, theFulltextSvc);
|
||||
return new SearchStrategyFactory(myStorageSettings, theFulltextSvc);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ -756,8 +759,8 @@ public class JpaConfig {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public SearchParameterDaoValidator searchParameterDaoValidator(FhirContext theFhirContext, DaoConfig theDaoConfig, ISearchParamRegistry theSearchParamRegistry) {
|
||||
return new SearchParameterDaoValidator(theFhirContext, theDaoConfig, theSearchParamRegistry);
|
||||
public SearchParameterDaoValidator searchParameterDaoValidator(FhirContext theFhirContext, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) {
|
||||
return new SearchParameterDaoValidator(theFhirContext, theStorageSettings, theSearchParamRegistry);
|
||||
}
|
||||
|
||||
@Bean
|
||||
@ -771,7 +774,6 @@ public class JpaConfig {
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Bean
|
||||
public ITermReindexingSvc termReindexingSvc() {
|
||||
return new TermReindexingSvcImpl();
|
||||
@ -783,10 +785,12 @@ public class JpaConfig {
|
||||
}
|
||||
|
||||
@Bean
|
||||
public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao(){
|
||||
public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao() {
|
||||
return new MdmLinkDaoJpaImpl();
|
||||
}
|
||||
|
||||
@Bean
|
||||
IMdmLinkImplFactory<MdmLink> mdmLinkImplFactory() {return new JpaMdmLinkImplFactory();}
|
||||
IMdmLinkImplFactory<MdmLink> mdmLinkImplFactory() {
|
||||
return new JpaMdmLinkImplFactory();
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.config;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.search.ExceptionService;
|
||||
import ca.uhn.fhir.jpa.search.ISynchronousSearchSvc;
|
||||
@ -63,7 +62,7 @@ public class SearchConfig {
|
||||
public static final String CONTINUE_TASK = "continueTask";
|
||||
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
private JpaStorageSettings myStorageSettings;
|
||||
@Autowired
|
||||
private HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
|
||||
@Autowired
|
||||
@ -71,8 +70,6 @@ public class SearchConfig {
|
||||
@Autowired
|
||||
private HibernatePropertiesProvider myDialectProvider;
|
||||
@Autowired
|
||||
private ModelConfig myModelConfig;
|
||||
@Autowired
|
||||
private ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
private PartitionSettings myPartitionSettings;
|
||||
@ -115,7 +112,7 @@ public class SearchConfig {
|
||||
public ISearchCoordinatorSvc searchCoordinatorSvc() {
|
||||
return new SearchCoordinatorSvcImpl(
|
||||
myContext,
|
||||
myDaoConfig,
|
||||
myStorageSettings,
|
||||
myInterceptorBroadcaster,
|
||||
myHapiTransactionService,
|
||||
mySearchCacheSvc,
|
||||
@ -139,14 +136,13 @@ public class SearchConfig {
|
||||
|
||||
@Bean(name = ISearchBuilder.SEARCH_BUILDER_BEAN_NAME)
|
||||
@Scope("prototype")
|
||||
public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType, DaoConfig theDaoConfig) {
|
||||
public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
|
||||
return new SearchBuilder(theDao,
|
||||
theResourceName,
|
||||
myDaoConfig,
|
||||
myStorageSettings,
|
||||
myEntityManagerFactory,
|
||||
mySqlBuilderFactory,
|
||||
myDialectProvider,
|
||||
myModelConfig,
|
||||
mySearchParamRegistry,
|
||||
myPartitionSettings,
|
||||
myInterceptorBroadcaster,
|
||||
@ -168,7 +164,7 @@ public class SearchConfig {
|
||||
myInterceptorBroadcaster,
|
||||
mySearchBuilderFactory,
|
||||
mySearchResultCacheSvc,
|
||||
myDaoConfig,
|
||||
myStorageSettings,
|
||||
mySearchCacheSvc,
|
||||
myPagingProvider
|
||||
);
|
||||
@ -184,7 +180,7 @@ public class SearchConfig {
|
||||
myInterceptorBroadcaster,
|
||||
mySearchBuilderFactory,
|
||||
mySearchResultCacheSvc,
|
||||
myDaoConfig,
|
||||
myStorageSettings,
|
||||
mySearchCacheSvc,
|
||||
myPagingProvider,
|
||||
exceptionService() // singleton
|
||||
|
@ -12,7 +12,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.dao.IJpaDao;
|
||||
@ -143,6 +143,7 @@ import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.util.Objects.nonNull;
|
||||
import static org.apache.commons.lang3.StringUtils.defaultString;
|
||||
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
@ -222,8 +223,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
@Autowired
|
||||
ExpungeService myExpungeService;
|
||||
@Autowired
|
||||
private DaoConfig myConfig;
|
||||
@Autowired
|
||||
private ISearchParamPresenceSvc mySearchParamPresenceSvc;
|
||||
@Autowired
|
||||
private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor;
|
||||
@ -357,8 +356,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
}
|
||||
|
||||
@Override
|
||||
public DaoConfig getConfig() {
|
||||
return myConfig;
|
||||
public JpaStorageSettings getStorageSettings() {
|
||||
return myStorageSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -385,7 +384,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
|
||||
|
||||
if (retVal == null) {
|
||||
HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
|
||||
HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new);
|
||||
retVal = resolvedTagDefinitions.get(key);
|
||||
|
||||
if (retVal == null) {
|
||||
@ -525,11 +524,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
}
|
||||
|
||||
public boolean isLogicalReference(IIdType theId) {
|
||||
return LogicalReferenceHelper.isLogicalReference(myConfig.getModelConfig(), theId);
|
||||
return LogicalReferenceHelper.isLogicalReference(myStorageSettings, theId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the resource has changed (either the contents or the tags)
|
||||
* Returns {@literal true} if the resource has changed (either the contents or the tags)
|
||||
*/
|
||||
protected EncodedResource populateResourceIntoEntity(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, boolean thePerformIndexing) {
|
||||
if (theEntity.getResourceType() == null) {
|
||||
@ -545,7 +544,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
|
||||
if (thePerformIndexing) {
|
||||
|
||||
encoding = myConfig.getResourceEncoding();
|
||||
encoding = myStorageSettings.getResourceEncoding();
|
||||
|
||||
String resourceType = theEntity.getResourceType();
|
||||
|
||||
@ -559,7 +558,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
HashFunction sha256 = Hashing.sha256();
|
||||
HashCode hashCode;
|
||||
String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
|
||||
if (getConfig().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getConfig().getInlineResourceTextBelowSize()) {
|
||||
if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
|
||||
resourceText = encodedResource;
|
||||
resourceBinary = null;
|
||||
encoding = ResourceEncodingEnum.JSON;
|
||||
@ -591,8 +590,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
|
||||
}
|
||||
|
||||
boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();
|
||||
skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
|
||||
boolean skipUpdatingTags = myStorageSettings.isMassIngestionMode() && theEntity.isHasTags();
|
||||
skipUpdatingTags |= myStorageSettings.getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE;
|
||||
|
||||
if (!skipUpdatingTags) {
|
||||
changed |= updateTags(theTransactionDetails, theRequest, theResource, theEntity);
|
||||
@ -600,7 +599,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
|
||||
} else {
|
||||
|
||||
theEntity.setHashSha256(null);
|
||||
if (nonNull(theEntity.getHashSha256())) {
|
||||
theEntity.setHashSha256(null);
|
||||
changed = true;
|
||||
}
|
||||
|
||||
resourceBinary = null;
|
||||
resourceText = null;
|
||||
encoding = ResourceEncodingEnum.DEL;
|
||||
@ -610,7 +613,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
if (thePerformIndexing && !changed) {
|
||||
if (theEntity.getId() == null) {
|
||||
changed = true;
|
||||
} else if (myConfig.isMassIngestionMode()) {
|
||||
} else if (myStorageSettings.isMassIngestionMode()) {
|
||||
|
||||
// Don't check existing - We'll rely on the SHA256 hash only
|
||||
|
||||
@ -704,7 +707,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
}
|
||||
|
||||
theExcludeElements.add("id");
|
||||
boolean inlineTagMode = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
|
||||
boolean inlineTagMode = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE;
|
||||
if (hasExtensions || inlineTagMode) {
|
||||
if (!inlineTagMode) {
|
||||
theExcludeElements.add(theResourceType + ".meta.profile");
|
||||
@ -938,7 +941,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
entity.setUpdated(theDeletedTimestampOrNull);
|
||||
entity.setNarrativeText(null);
|
||||
entity.setContentText(null);
|
||||
entity.setHashSha256(null);
|
||||
entity.setIndexStatus(INDEX_STATUS_INDEXED);
|
||||
changed = populateResourceIntoEntity(theTransactionDetails, theRequest, theResource, entity, true);
|
||||
|
||||
@ -1025,7 +1027,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
|
||||
}
|
||||
|
||||
if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
|
||||
if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myStorageSettings.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
|
||||
ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue());
|
||||
if (theResource != null) {
|
||||
myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
|
||||
@ -1148,7 +1150,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
ResourceTable entity = (ResourceTable) theEntity;
|
||||
|
||||
IBaseResource oldResource;
|
||||
if (getConfig().isMassIngestionMode()) {
|
||||
if (getStorageSettings().isMassIngestionMode()) {
|
||||
oldResource = null;
|
||||
} else {
|
||||
oldResource = myJpaStorageResourceParser.toResource(entity, false);
|
||||
@ -1179,7 +1181,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
historyEntity.setDeleted(null);
|
||||
|
||||
// Check if resource is the same
|
||||
ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
|
||||
ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
|
||||
List<String> excludeElements = new ArrayList<>(8);
|
||||
getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
|
||||
String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
|
||||
@ -1188,13 +1190,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
||||
|
||||
historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
|
||||
|
||||
if (!changed && myConfig.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
if (!changed && myStorageSettings.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue());
myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
return historyEntity;
}

if (getConfig().getInlineResourceTextBelowSize() > 0 && encodedResourceString.length() < getConfig().getInlineResourceTextBelowSize()) {
if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON);
} else {
populateEncodedResource(encodedResource, null, resourceBinary, encoding);
@ -1256,7 +1258,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}

private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
boolean versionedTags = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED;
final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
historyEntry.setEncoding(theChanged.getEncoding());
historyEntry.setResource(theChanged.getResourceBinary());
@ -1289,8 +1291,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
String requestId = getRequestId(theRequest, source);
source = cleanProvenanceSourceUri(source);

boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
boolean haveSource = isNotBlank(source) && myStorageSettings.getStoreMetaSourceInformation().isStoreSourceUri();
boolean haveRequestId = isNotBlank(requestId) && myStorageSettings.getStoreMetaSourceInformation().isStoreRequestId();
if (haveSource || haveRequestId) {
ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
provenance.setResourceHistoryTable(historyEntry);
@ -1307,7 +1309,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}

private String getRequestId(RequestDetails theRequest, String theSource) {
if (myConfig.isPreserveRequestIdInResourceBody()) {
if (myStorageSettings.isPreserveRequestIdInResourceBody()) {
return StringUtils.substringAfter(theSource, "#");
}
return theRequest != null ? theRequest.getRequestId() : null;
@ -1462,7 +1464,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
continue;
}

if (getConfig().isEnforceReferenceTargetTypes()) {
if (getStorageSettings().isEnforceReferenceTargetTypes()) {
for (IBase nextChild : values) {
IBaseReference nextRef = (IBaseReference) nextChild;
IIdType referencedId = nextRef.getReferenceElement();
@ -1483,9 +1485,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}

protected void validateMetaCount(int theMetaCount) {
if (myConfig.getResourceMetaCountHardLimit() != null) {
if (theMetaCount > myConfig.getResourceMetaCountHardLimit()) {
throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + " meta entries (tag/profile/security label), maximum is " + myConfig.getResourceMetaCountHardLimit());
if (myStorageSettings.getResourceMetaCountHardLimit() != null) {
if (theMetaCount > myStorageSettings.getResourceMetaCountHardLimit()) {
throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + " meta entries (tag/profile/security label), maximum is " + myStorageSettings.getResourceMetaCountHardLimit());
}
}
}
@ -1525,7 +1527,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
throw new UnprocessableEntityException(Msg.code(933) + "Resource contains the 'subsetted' tag, and must not be stored as it may contain a subset of available data");
}

if (getConfig().isEnforceReferenceTargetTypes()) {
if (getStorageSettings().isEnforceReferenceTargetTypes()) {
String resName = getContext().getResourceType(theResource);
validateChildReferenceTargetTypes(theResource, resName);
}
@ -1540,8 +1542,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myConfig = theDaoConfig;
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}

public void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
@ -1551,7 +1553,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} else {
theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource));
theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
if (myDaoConfig.isAdvancedHSearchIndexing()) {
if (myStorageSettings.isAdvancedHSearchIndexing()) {
ExtendedHSearchIndexData hSearchIndexData = myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
theEntity.setLuceneIndexData(hSearchIndexData);
}
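The hunks above are part of this change's central rename: the DaoConfig bean becomes JpaStorageSettings, the myConfig/myDaoConfig fields become myStorageSettings, and the test-only setter follows suit. A minimal sketch of how a unit test that previously handed the DAO a DaoConfig might adapt (the test fixture variable and the specific setting toggled are assumptions; the type and setter names come directly from the diff):

    // Before this change: myDao.setDaoConfigForUnitTest(new DaoConfig());
    JpaStorageSettings settings = new JpaStorageSettings();
    // Assumed setter, mirroring the isSuppressUpdatesWithNoChange() getter used above
    settings.setSuppressUpdatesWithNoChange(true);
    myDao.setStorageSettingsForUnitTest(settings);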
@ -32,7 +32,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
@ -268,7 +268,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}

if (getConfig().getResourceServerIdStrategy() == DaoConfig.IdStrategyEnum.UUID) {
if (getStorageSettings().getResourceServerIdStrategy() == JpaStorageSettings.IdStrategyEnum.UUID) {
theResource.setId(UUID.randomUUID().toString());
theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE);
}
@ -376,11 +376,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
boolean createForPureNumericIds = true;
createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
} else {
boolean createForPureNumericIds = getConfig().getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ALPHANUMERIC;
boolean createForPureNumericIds = getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC;
createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
}
} else {
switch (getConfig().getResourceClientIdStrategy()) {
switch (getStorageSettings().getResourceClientIdStrategy()) {
case NOT_ALLOWED:
case ALPHANUMERIC:
break;
@ -475,15 +475,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

void validateResourceIdCreation(T theResource, RequestDetails theRequest) {
DaoConfig.ClientIdStrategyEnum strategy = getConfig().getResourceClientIdStrategy();
JpaStorageSettings.ClientIdStrategyEnum strategy = getStorageSettings().getResourceClientIdStrategy();

if (strategy == DaoConfig.ClientIdStrategyEnum.NOT_ALLOWED) {
if (strategy == JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED) {
if (!isSystemRequest(theRequest)) {
throw new ResourceNotFoundException(Msg.code(959) + getMessageSanitized("failedToCreateWithClientAssignedIdNotAllowed", theResource.getIdElement().getIdPart()));
}
}

if (strategy == DaoConfig.ClientIdStrategyEnum.ALPHANUMERIC) {
if (strategy == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
if (theResource.getIdElement().isIdPartValidLong()) {
throw new InvalidRequestException(Msg.code(960) + getMessageSanitized("failedToCreateWithClientAssignedNumericId", theResource.getIdElement().getIdPart()));
}
@ -648,7 +648,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest, null);

if (resourceIds.size() > 1) {
if (!getConfig().isAllowMultipleDelete()) {
if (!getStorageSettings().isAllowMultipleDelete()) {
throw new PreconditionFailedException(Msg.code(962) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "DELETE", theUrl, resourceIds.size()));
}
}
@ -681,6 +681,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

// Perform delete

preDelete(resourceToDelete, entity, theRequest);

updateEntityForDelete(theRequest, transactionDetails, entity);
resourceToDelete.setId(entity.getIdDt());

@ -718,7 +720,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

private void validateDeleteEnabled() {
if (!getConfig().isDeleteEnabled()) {
if (!getStorageSettings().isDeleteEnabled()) {
String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "deleteBlockedBecauseDisabled");
throw new PreconditionFailedException(Msg.code(966) + msg);
}
@ -832,7 +834,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

private void validateExpungeEnabled() {
if (!getConfig().isExpungeEnabled()) {
if (!getStorageSettings().isExpungeEnabled()) {
throw new MethodNotAllowedException(Msg.code(968) + "$expunge is not enabled on this server");
}
}
@ -951,7 +953,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return;
}

if (getConfig().isMarkResourcesForReindexingUponSearchParameterChange()) {
if (getStorageSettings().isMarkResourcesForReindexingUponSearchParameterChange()) {

ReindexJobParameters params = new ReindexJobParameters();

@ -1039,7 +1041,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}

ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
boolean nonVersionedTags = myDaoConfig.getTagStorageMode() != DaoConfig.TagStorageModeEnum.VERSIONED;
boolean nonVersionedTags = myStorageSettings.getTagStorageMode() != JpaStorageSettings.TagStorageModeEnum.VERSIONED;
if (latestVersion.getVersion() != entity.getVersion() || nonVersionedTags) {
doMetaDelete(theMetaDel, entity, theRequest, transactionDetails);
} else {
@ -1090,7 +1092,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@PostConstruct
@Override
public void start() {
assert getConfig() != null;
assert getStorageSettings() != null;

RuntimeResourceDefinition def = getContext().getResourceDefinition(myResourceType);
myResourceName = def.getName();
@ -1423,7 +1425,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (theParams.getSearchContainedMode() == SearchContainedModeEnum.BOTH) {
throw new MethodNotAllowedException(Msg.code(983) + "Contained mode 'both' is not currently supported");
}
if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE && !myModelConfig.isIndexOnContainedResources()) {
if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE && !myStorageSettings.isIndexOnContainedResources()) {
throw new MethodNotAllowedException(Msg.code(984) + "Searching with _contained mode enabled is not enabled on this server");
}

@ -1478,10 +1480,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
if (theRequest != null) {

if (theRequest.isSubRequest()) {
Integer max = getConfig().getMaximumSearchResultCountInTransaction();
Integer max = getStorageSettings().getMaximumSearchResultCountInTransaction();
if (max != null) {
Validate.inclusiveBetween(1, Integer.MAX_VALUE, max, "Maximum search result count in transaction must be a positive integer");
theParams.setLoadSynchronousUpTo(getConfig().getMaximumSearchResultCountInTransaction());
theParams.setLoadSynchronousUpTo(getStorageSettings().getMaximumSearchResultCountInTransaction());
}
}

@ -1515,9 +1517,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return myTransactionService.execute(theRequest, transactionDetails, tx -> {

if (theParams.getLoadSynchronousUpTo() != null) {
theParams.setLoadSynchronousUpTo(Math.min(getConfig().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
theParams.setLoadSynchronousUpTo(Math.min(getStorageSettings().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
} else {
theParams.setLoadSynchronousUpTo(getConfig().getInternalSynchronousSearchSize());
theParams.setLoadSynchronousUpTo(getStorageSettings().getInternalSynchronousSearchSize());
}

ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
@ -1626,7 +1628,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
Runnable onRollback = () -> theResource.getIdElement().setValue(id);

// Execute the update in a retryable transaction
if (myDaoConfig.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) {
if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) {
return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails), onRollback);
} else {
return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails), onRollback);
@ -1768,7 +1770,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Validate that there are no resources pointing to the candidate that
// would prevent deletion
DeleteConflictList deleteConflicts = new DeleteConflictList();
if (getConfig().isEnforceReferentialIntegrityOnDelete()) {
if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
myDeleteConflictService.validateOkToDelete(deleteConflicts, entity, true, theRequest, new TransactionDetails());
}
DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
@ -1837,7 +1839,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

private void validateGivenIdIsAppropriateToRetrieveResource(IIdType theId, BaseHasResource entity) {
if (entity.getForcedId() != null) {
if (getConfig().getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
if (getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
if (theId.isIdPartValidLong()) {
// This means that the resource with the given numeric ID exists, but it has a "forced ID", meaning that
// as far as the outside world is concerned, the given ID doesn't exist (it's just an internal pointer
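validateResourceIdCreation(), rewritten above against JpaStorageSettings.ClientIdStrategyEnum, encodes the policy for client-assigned resource IDs: NOT_ALLOWED rejects any client-supplied ID unless the request is a system request, ALPHANUMERIC rejects IDs that are purely numeric (those stay reserved for server-assigned IDs), and ANY accepts both. A compact, illustrative restatement of that decision (the helper and the generic exception are placeholders; the enum values and checks come from the diff):

    // Illustrative only: the policy enforced by validateResourceIdCreation() above.
    static void checkClientAssignedId(JpaStorageSettings.ClientIdStrategyEnum theStrategy,
                                      String theIdPart, boolean theSystemRequest) {
        if (theStrategy == JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED && !theSystemRequest) {
            throw new IllegalArgumentException("client-assigned IDs are not allowed"); // Msg.code(959) path above
        }
        if (theStrategy == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC && theIdPart.matches("\\d+")) {
            throw new IllegalArgumentException("purely numeric client-assigned IDs are rejected"); // Msg.code(960) path above
        }
        // ClientIdStrategyEnum.ANY: the ID is accepted as-is
    }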
@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
@ -102,11 +102,11 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
}

private void validateExpungeEnabled(ExpungeOptions theExpungeOptions) {
if (!getConfig().isExpungeEnabled()) {
if (!getStorageSettings().isExpungeEnabled()) {
throw new MethodNotAllowedException(Msg.code(2080) + "$expunge is not enabled on this server");
}

if (theExpungeOptions.isExpungeEverything() && !getConfig().isAllowMultipleDelete()) {
if (theExpungeOptions.isExpungeEverything() && !getStorageSettings().isAllowMultipleDelete()) {
throw new MethodNotAllowedException(Msg.code(2081) + "Multiple delete is not enabled on this server");
}
}
@ -214,7 +214,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
}

entityIds = loadedResourceTableEntries.stream().map(t->t.getId()).collect(Collectors.toList());
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null);
}

@ -280,8 +280,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
}

@Override
protected DaoConfig getConfig() {
return myDaoConfig;
protected JpaStorageSettings getStorageSettings() {
return myStorageSettings;
}

@Override
@ -290,8 +290,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}

}
@ -20,7 +20,6 @@ package ca.uhn.fhir.jpa.dao;
* #L%
*/

import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchIndexExtractor;
@ -31,7 +31,6 @@ import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder;
import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
import ca.uhn.fhir.jpa.dao.search.LastNOperation;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
@ -84,7 +83,11 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FulltextSearchSvcImpl.class);
private static final int DEFAULT_MAX_NON_PAGED_SIZE = 500;

final private ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder();
@Autowired
ISearchParamExtractor mySearchParamExtractor;
@Autowired
IIdHelperService myIdHelperService;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;
@Autowired
@ -94,20 +97,9 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
ISearchParamExtractor mySearchParamExtractor;
@Autowired
IIdHelperService myIdHelperService;

@Autowired
ModelConfig myModelConfig;

private JpaStorageSettings myStorageSettings;
@Autowired
private IHSearchSortHelper myExtendedFulltextSortHelper;

final private ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder();

@Autowired(required = false)
private IHSearchEventListener myHSearchEventListener;

@ -120,12 +112,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
super();
}

@Override
public ExtendedHSearchIndexData extractLuceneIndexData(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
String resourceType = myFhirContext.getResourceType(theResource);
ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
myDaoConfig, myFhirContext, activeSearchParams, mySearchParamExtractor, myModelConfig);
return extractor.extract(theResource,theNewParams);
myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
return extractor.extract(theResource, theNewParams);
}

@Override
@ -134,7 +127,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
// keep this in sync with the guts of doSearch
boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT) || myParams.isLastN();

requiresHibernateSearchAccess |= myDaoConfig.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
requiresHibernateSearchAccess |= myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);

return requiresHibernateSearchAccess;
}
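The hunk above keeps the Hibernate Search gate in sync with doSearch(): full-text access is required when the _content or _text parameters are present, when the query is a lastN query, or when advanced HSearch indexing is enabled and the advanced query builder can handle at least some of the parameters. A rough restatement of that gate as a standalone predicate (the helper shape is an assumption; the flags and calls are the ones used in the diff):

    // Sketch of the gating logic shown above; the method itself is hypothetical.
    boolean requiresHibernateSearch(SearchParameterMap theParams, JpaStorageSettings theSettings,
                                    ExtendedHSearchSearchBuilder theAdvancedBuilder) {
        boolean fullTextParam = theParams.containsKey(Constants.PARAM_CONTENT)
                || theParams.containsKey(Constants.PARAM_TEXT)
                || theParams.isLastN();
        return fullTextParam
                || (theSettings.isAdvancedHSearchIndexing() && theAdvancedBuilder.isSupportsSomeOf(theParams));
    }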
@ -186,10 +179,10 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {

private SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> getSearchQueryOptionsStep(
String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {

dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
var query= getSearchSession().search(ResourceTable.class)
var query = getSearchSession().search(ResourceTable.class)
// The document id is the PK which is pid. We use this instead of _myId to avoid fetching the doc body.
.select(
// adapt the String docRef.id() to the Long that it really is.
@ -203,7 +196,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {

if (theParams.getSort() != null) {
query.sort(
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType) );
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType));

// indicate parameter was processed
theParams.setSort(null);
@ -216,7 +209,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
private PredicateFinalStep buildWhereClause(SearchPredicateFactory f, String theResourceType,
SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
return f.bool(b -> {
ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myModelConfig, b, f);
ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);

/*
* Handle _content parameter (resource body content)
@ -249,7 +242,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
/*
* Handle other supported parameters
*/
if (myDaoConfig.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
if (myStorageSettings.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, theResourceType, theParams, mySearchParamRegistry);
}
//DROP EARLY HERE IF BOOL IS EMPTY?
@ -332,7 +325,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
validateHibernateSearchIsEnabled();
ensureElastic();

ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, myModelConfig, getSearchSession());
ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, myStorageSettings, getSearchSession());

dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
return autocomplete.search(theOptions);
@ -340,13 +333,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {

/**
* Throws an error if configured with Lucene.
*
* <p>
* Some features only work with Elasticsearch.
* Lastn and the autocomplete search use nested aggregations which are Elasticsearch-only
*/
private void ensureElastic() {
try {
getSearchSession().scope( ResourceTable.class )
getSearchSession().scope(ResourceTable.class)
.aggregation()
.extension(ElasticsearchExtension.get());
} catch (SearchException e) {
@ -360,7 +353,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
public List<IResourcePersistentId> lastN(SearchParameterMap theParams, Integer theMaximumResults) {
ensureElastic();
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
List<Long> pidList = new LastNOperation(getSearchSession(), myFhirContext, myModelConfig, mySearchParamRegistry)
List<Long> pidList = new LastNOperation(getSearchSession(), myFhirContext, myStorageSettings, mySearchParamRegistry)
.executeLastN(theParams, theMaximumResults);
return convertLongsToResourcePersistentIds(pidList);
}
@ -384,7 +377,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
// order resource projections as per thePids
ArrayList<Long> pidList = new ArrayList<>(thePids);
List<ExtendedHSearchResourceProjection> orderedAsPidsResourceDataList = rawResourceDataList.stream()
.sorted( Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid) ).collect( Collectors.toList() );
.sorted(Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid)).collect(Collectors.toList());

return resourceProjectionsToResources(orderedAsPidsResourceDataList);
}
@ -400,7 +393,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {

private CompositeProjectionOptionsStep<?, ExtendedHSearchResourceProjection> buildResourceSelectClause(
SearchProjectionFactory<EntityReference, ResourceTable> f) {
SearchProjectionFactory<EntityReference, ResourceTable> f) {
return f.composite(
ExtendedHSearchResourceProjection::new,
f.field("myId", Long.class),
@ -433,12 +426,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);

var query = getSearchSession().search(ResourceTable.class)
.select(this::buildResourceSelectClause)
.where(f -> buildWhereClause(f, theResourceType, theParams, null));
.select(this::buildResourceSelectClause)
.where(f -> buildWhereClause(f, theResourceType, theParams, null));

if (theParams.getSort() != null) {
query.sort(
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType) );
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType));
}

List<ExtendedHSearchResourceProjection> extendedLuceneResourceProjections = query.fetchHits(offset, limit);
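ensureElastic(), shown above, guards the Elasticsearch-only features (lastN and ValueSet autocomplete both call it first): it asks Hibernate Search for the Elasticsearch aggregation extension and lets the resulting SearchException surface as a "not supported on this server" error when the backend is Lucene. A minimal sketch of the same probe expressed as a boolean check (the helper name and return style are assumptions; the API calls mirror the hunk above):

    // Sketch only: probe whether the configured Hibernate Search backend is Elasticsearch.
    private boolean isElasticsearchBackend(SearchSession theSession) {
        try {
            theSession.scope(ResourceTable.class)
                    .aggregation()
                    .extension(ElasticsearchExtension.get());
            return true;
        } catch (SearchException e) {
            // Lucene backend: nested aggregations (lastN, autocomplete) are unavailable
            return false;
        }
    }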
@ -99,7 +99,7 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion,
theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);

if (getConfig().isLastNEnabled()) {
if (getStorageSettings().isLastNEnabled()) {
if (!retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
// Update indexes here for LastN operation.
@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoSearchParameter;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.dao.validation.SearchParameterDaoValidator;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.hapi.converters.canonical.VersionCanonicalizer;
import com.google.common.annotations.VisibleForTesting;
@ -95,7 +95,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi

if (isAutocompleteExtension) {
// this is a funky extension for NIH. Do our own thing and return.
ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myDaoConfig, theContext, theFilter, theCount, theId, theUrl, theValueSet);
ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myStorageSettings, theContext, theFilter, theCount, theId, theUrl, theValueSet);
if (myFulltextSearch == null || myFulltextSearch.isDisabled()) {
throw new InvalidRequestException(Msg.code(2083) + " Autocomplete is not supported on this server, as the fulltext search service is not configured.");
} else {
@ -119,7 +119,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
throw new InvalidRequestException(Msg.code(1134) + "$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.");
}

ValueSetExpansionOptions options = createValueSetExpansionOptions(myDaoConfig, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage);
ValueSetExpansionOptions options = createValueSetExpansionOptions(myStorageSettings, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage);

IValidationSupport.ValueSetExpansionOutcome outcome;
if (haveId) {
@ -235,7 +235,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);

if (getConfig().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
myTerminologySvc.storeTermValueSet(retVal, valueSet);
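The JpaResourceDaoValueSet hunks above move the $expand option plumbing and the pre-expansion check over to myStorageSettings/getStorageSettings(); pre-expansion only runs when the storage-settings flag is enabled and the stored ValueSet actually changed. A minimal configuration sketch (the Spring-style bean method and the setter are assumptions; the setter name mirrors the isPreExpandValueSets() getter used above):

    // Sketch: enabling ValueSet pre-expansion on the renamed settings bean.
    @Bean
    public JpaStorageSettings storageSettings() {
        JpaStorageSettings settings = new JpaStorageSettings();
        settings.setPreExpandValueSets(true); // assumed setter
        return settings;
    }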
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
@ -59,7 +59,6 @@ import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -82,7 +81,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
@ -115,7 +114,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
default:
if (history.isHasTags()) {
@ -158,7 +157,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
resourceText = history.getResourceTextVc();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (resource.isHasTags()) {
@ -183,7 +182,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
@ -52,7 +52,7 @@ public class ObservationLastNIndexPersistSvc {
private IElasticsearchSvc myElasticsearchSvc;

@Autowired
private DaoConfig myConfig;
private JpaStorageSettings myConfig;

@Autowired
private FhirContext myContext;
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
@ -92,7 +92,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myFhirContext;
@Autowired
@ -169,7 +169,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {
for (JpaPid next : outcome) {
foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
idsToPreFetch.add(next.getId());
}
}
Some files were not shown because too many files have changed in this diff.