JPA config refactoring (#4545)
* Interceptor rework
* Work on config refactor
* Fixes
* Docs tweaks
* Address review comments
* Test tweak
* Test fixes
* Try to fix tests
* Test tweaks
This commit is contained in:
parent
57cd5299ee
commit
eea6193df9
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
 
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 
@@ -27,51 +27,6 @@ import java.util.function.Predicate;
 
 public interface IBaseInterceptorService<POINTCUT extends IPointcut> extends IBaseInterceptorBroadcaster<POINTCUT> {
 
-/**
- * Register an interceptor that will be used in a {@link ThreadLocal} context.
- * This means that events will only be broadcast to the given interceptor if
- * they were fired from the current thread.
- * <p>
- * Note that it is almost always desirable to call this method with a
- * try-finally statement that removes the interceptor afterwards, since
- * this can lead to memory leakage, poor performance due to ever-increasing
- * numbers of interceptors, etc.
- * </p>
- * <p>
- * Note that most methods such as {@link #getAllRegisteredInterceptors()} and
- * {@link #unregisterAllInterceptors()} do not affect thread local interceptors
- * as they are kept in a separate list.
- * </p>
- * <p>
- * ThreadLocal interceptors are now disabled by default as of HAPI FHIR 6.2.0 and must be manually
- * enabled by calling {@link ca.uhn.fhir.interceptor.executor.BaseInterceptorService#setThreadlocalInvokersEnabled(boolean)}.
- * They are now deprecated. Registering a threadlocal interceptor without enabling this feature will
- * result in a {@link IllegalArgumentException}.
- * </p>
- *
- * @param theInterceptor The interceptor
- * @return Returns <code>true</code> if at least one valid hook method was found on this interceptor
- * @deprecated Threadlocal interceptors have been deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release due to lack of use. If you feel that this is a bad decision, please speak up on the HAPI FHIR mailing list.
- */
-@Deprecated
-boolean registerThreadLocalInterceptor(Object theInterceptor);
-
-/**
- * Unregisters a ThreadLocal interceptor
- * <p>
- * ThreadLocal interceptors are now disabled by default as of HAPI FHIR 6.2.0 and must be manually
- * enabled by calling {@link ca.uhn.fhir.interceptor.executor.BaseInterceptorService#setThreadlocalInvokersEnabled(boolean)}.
- * They are now deprecated. Registering a threadlocal interceptor without enabling this feature will
- * result in a {@link IllegalArgumentException}.
- * </p>
- *
- * @param theInterceptor The interceptor
- * @see #registerThreadLocalInterceptor(Object)
- * @deprecated Threadlocal interceptors have been deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release due to lack of use. If you feel that this is a bad decision, please speak up on the HAPI FHIR mailing list.
- */
-@Deprecated
-void unregisterThreadLocalInterceptor(Object theInterceptor);
-
 /**
 * Register an interceptor. This method has no effect if the given interceptor is already registered.
 *

@@ -94,8 +49,7 @@ public interface IBaseInterceptorService<POINTCUT extends IPointcut> extends IBa
 List<Object> getAllRegisteredInterceptors();
 
 /**
- * Unregisters all registered interceptors. Note that this method does not unregister
- * any {@link #registerThreadLocalInterceptor(Object) thread local interceptors}.
+ * Unregisters all registered interceptors.
 */
 void unregisterAllInterceptors();
 
@@ -32,7 +32,6 @@ import ca.uhn.fhir.util.ReflectionUtil;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
-import com.google.common.collect.Multimaps;
 import org.apache.commons.lang3.Validate;
 import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.commons.lang3.builder.ToStringStyle;
@@ -51,32 +50,33 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 // TODO: JA maybe add an enummap for pointcuts registered?
-public abstract class BaseInterceptorService<POINTCUT extends IPointcut> implements IBaseInterceptorService<POINTCUT>, IBaseInterceptorBroadcaster<POINTCUT> {
+public abstract class BaseInterceptorService<POINTCUT extends Enum<POINTCUT> & IPointcut> implements IBaseInterceptorService<POINTCUT>, IBaseInterceptorBroadcaster<POINTCUT> {
 private static final Logger ourLog = LoggerFactory.getLogger(BaseInterceptorService.class);
 private final List<Object> myInterceptors = new ArrayList<>();
 private final ListMultimap<POINTCUT, BaseInvoker> myGlobalInvokers = ArrayListMultimap.create();
 private final ListMultimap<POINTCUT, BaseInvoker> myAnonymousInvokers = ArrayListMultimap.create();
 private final Object myRegistryMutex = new Object();
-private final ThreadLocal<ListMultimap<POINTCUT, BaseInvoker>> myThreadlocalInvokers = new ThreadLocal<>();
+private final Class<POINTCUT> myPointcutType;
+private volatile EnumSet<POINTCUT> myRegisteredPointcuts;
 private String myName;
-private boolean myThreadlocalInvokersEnabled = false;
 private boolean myWarnOnInterceptorWithNoHooks = true;
 
 /**
 * Constructor which uses a default name of "default"
 */
-public BaseInterceptorService() {
-this("default");
+public BaseInterceptorService(Class<POINTCUT> thePointcutType) {
+this(thePointcutType, "default");
 }
 
 /**
@@ -84,9 +84,11 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 *
 * @param theName The name for this registry (useful for troubleshooting)
 */
-public BaseInterceptorService(String theName) {
+public BaseInterceptorService(Class<POINTCUT> thePointcutType, String theName) {
 super();
 myName = theName;
+myPointcutType = thePointcutType;
+rebuildRegisteredPointcutSet();
 }
 
 /**
@@ -96,23 +98,6 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 myWarnOnInterceptorWithNoHooks = theWarnOnInterceptorWithNoHooks;
 }
 
-/**
- * Are threadlocal interceptors enabled on this registry (defaults to false)
- */
-public boolean isThreadlocalInvokersEnabled() {
-return myThreadlocalInvokersEnabled;
-}
-
-/**
- * Are threadlocal interceptors enabled on this registry (defaults to false)
- *
- * @deprecated ThreadLocal interceptors are deprecated as of HAPI FHIR 6.2.0 and will be removed in a future release.
- */
-@Deprecated
-public void setThreadlocalInvokersEnabled(boolean theThreadlocalInvokersEnabled) {
-myThreadlocalInvokersEnabled = theThreadlocalInvokersEnabled;
-}
-
 @VisibleForTesting
 List<Object> getGlobalInterceptorsForUnitTest() {
 return myInterceptors;
@@ -131,14 +116,15 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 if (!isInterceptorAlreadyRegistered(theInterceptor)) {
 myInterceptors.add(theInterceptor);
 }
+
+rebuildRegisteredPointcutSet();
 }
 }
 
 @Override
 public List<Object> getAllRegisteredInterceptors() {
 synchronized (myRegistryMutex) {
-List<Object> retVal = new ArrayList<>();
-retVal.addAll(myInterceptors);
+List<Object> retVal = new ArrayList<>(myInterceptors);
 return Collections.unmodifiableList(retVal);
 }
 }
@@ -156,14 +142,17 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 @Override
 public void unregisterInterceptors(@Nullable Collection<?> theInterceptors) {
 if (theInterceptors != null) {
-new ArrayList<>(theInterceptors).forEach(t -> unregisterInterceptor(t));
+// We construct a new list before iterating because the service's internal
+// interceptor lists get passed into this method, and we get concurrent
+// modification errors if we modify them at the same time as we iterate them
+new ArrayList<>(theInterceptors).forEach(this::unregisterInterceptor);
 }
 }
 
 @Override
 public void registerInterceptors(@Nullable Collection<?> theInterceptors) {
 if (theInterceptors != null) {
-theInterceptors.forEach(t -> registerInterceptor(t));
+theInterceptors.forEach(this::registerInterceptor);
 }
 }
 
@@ -187,37 +176,11 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 unregisterInterceptor(nextInvoker.getValue().getInterceptor());
 }
 }
+
+rebuildRegisteredPointcutSet();
 }
 }
 
-@Override
-public boolean registerThreadLocalInterceptor(Object theInterceptor) {
-Validate.isTrue (myThreadlocalInvokersEnabled, "Thread local interceptors are not enabled on this server");
-ListMultimap<POINTCUT, BaseInvoker> invokers = getThreadLocalInvokerMultimap();
-scanInterceptorAndAddToInvokerMultimap(theInterceptor, invokers);
-return !invokers.isEmpty();
-
-}
-
-@Override
-public void unregisterThreadLocalInterceptor(Object theInterceptor) {
-Validate.isTrue (myThreadlocalInvokersEnabled, "Thread local interceptors are not enabled on this server");
-ListMultimap<POINTCUT, BaseInvoker> invokers = getThreadLocalInvokerMultimap();
-invokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
-if (invokers.isEmpty()) {
-myThreadlocalInvokers.remove();
-}
-}
-
-private ListMultimap<POINTCUT, BaseInvoker> getThreadLocalInvokerMultimap() {
-ListMultimap<POINTCUT, BaseInvoker> invokers = myThreadlocalInvokers.get();
-if (invokers == null) {
-invokers = Multimaps.synchronizedListMultimap(ArrayListMultimap.create());
-myThreadlocalInvokers.set(invokers);
-}
-return invokers;
-}
-
 @Override
 public boolean registerInterceptor(Object theInterceptor) {
 synchronized (myRegistryMutex) {
@@ -238,10 +201,19 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 myInterceptors.add(theInterceptor);
 sortByOrderAnnotation(myInterceptors);
 
+rebuildRegisteredPointcutSet();
+
 return true;
 }
 }
 
+private void rebuildRegisteredPointcutSet() {
+EnumSet<POINTCUT> registeredPointcuts = EnumSet.noneOf(myPointcutType);
+registeredPointcuts.addAll(myAnonymousInvokers.keySet());
+registeredPointcuts.addAll(myGlobalInvokers.keySet());
+myRegisteredPointcuts = registeredPointcuts;
+}
+
 private boolean isInterceptorAlreadyRegistered(Object theInterceptor) {
 for (Object next : myInterceptors) {
 if (next == theInterceptor) {

@@ -257,6 +229,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 boolean removed = myInterceptors.removeIf(t -> t == theInterceptor);
 removed |= myGlobalInvokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
 removed |= myAnonymousInvokers.entries().removeIf(t -> t.getValue().getInterceptor() == theInterceptor);
+rebuildRegisteredPointcutSet();
 return removed;
 }
 }
@@ -286,14 +259,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 @Override
 public boolean hasHooks(POINTCUT thePointcut) {
-return myGlobalInvokers.containsKey(thePointcut)
-|| myAnonymousInvokers.containsKey(thePointcut)
-|| hasThreadLocalHooks(thePointcut);
-}
-
-private boolean hasThreadLocalHooks(POINTCUT thePointcut) {
-ListMultimap<POINTCUT, BaseInvoker> hooks = myThreadlocalInvokersEnabled ? myThreadlocalInvokers.get() : null;
-return hooks != null && hooks.containsKey(thePointcut);
+return myRegisteredPointcuts.contains(thePointcut);
 }
 
 @Override
@@ -352,12 +318,6 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 List<BaseInvoker> globalInvokers = myGlobalInvokers.get(thePointcut);
 List<BaseInvoker> anonymousInvokers = myAnonymousInvokers.get(thePointcut);
-List<BaseInvoker> threadLocalInvokers = null;
-if (myThreadlocalInvokersEnabled) {
-ListMultimap<POINTCUT, BaseInvoker> pointcutToInvokers = myThreadlocalInvokers.get();
-if (pointcutToInvokers != null) {
-threadLocalInvokers = pointcutToInvokers.get(thePointcut);
-}
-}
 invokers = union(globalInvokers, anonymousInvokers, threadLocalInvokers);
 }
 
@@ -368,7 +328,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 * First argument must be the global invoker list!!
 */
 @SafeVarargs
-private final List<BaseInvoker> union(List<BaseInvoker>... theInvokersLists) {
+private List<BaseInvoker> union(List<BaseInvoker>... theInvokersLists) {
 List<BaseInvoker> haveOne = null;
 boolean haveMultiple = false;
 for (List<BaseInvoker> nextInvokerList : theInvokersLists) {

@@ -404,8 +364,8 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 retVal = Arrays
 .stream(theInvokersLists)
-.filter(t -> t != null)
-.flatMap(t -> t.stream())
+.filter(Objects::nonNull)
+.flatMap(Collection::stream)
 .sorted()
 .collect(Collectors.toList());
 

@@ -417,7 +377,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 /**
 * Only call this when assertions are enabled, it's expensive
 */
-boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) {
+final boolean haveAppropriateParams(POINTCUT thePointcut, HookParams theParams) {
 if (theParams.getParamsForType().values().size() != thePointcut.getParameterTypes().size()) {
 throw new IllegalArgumentException(Msg.code(1909) + String.format("Wrong number of params for pointcut %s - Wanted %s but found %s", thePointcut.name(), toErrorString(thePointcut.getParameterTypes()), theParams.getParamsForType().values().stream().map(t -> t != null ? t.getClass().getSimpleName() : "null").sorted().collect(Collectors.toList())));
 }
@@ -449,16 +409,16 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 // Register the interceptor and its various hooks
 for (HookInvoker nextAddedHook : addedInvokers) {
-IPointcut nextPointcut = nextAddedHook.getPointcut();
+POINTCUT nextPointcut = nextAddedHook.getPointcut();
 if (nextPointcut.equals(Pointcut.INTERCEPTOR_REGISTERED)) {
 continue;
 }
-theInvokers.put((POINTCUT) nextPointcut, nextAddedHook);
+theInvokers.put(nextPointcut, nextAddedHook);
 }
 
 // Make sure we're always sorted according to the order declared in @Order
-for (IPointcut nextPointcut : theInvokers.keys()) {
-List<BaseInvoker> nextInvokerList = theInvokers.get((POINTCUT) nextPointcut);
+for (POINTCUT nextPointcut : theInvokers.keys()) {
+List<BaseInvoker> nextInvokerList = theInvokers.get(nextPointcut);
 nextInvokerList.sort(Comparator.naturalOrder());
 }
 
@@ -489,60 +449,12 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 protected abstract Optional<HookDescriptor> scanForHook(Method nextMethod);
 
-protected static <T extends Annotation> Optional<T> findAnnotation(AnnotatedElement theObject, Class<T> theHookClass) {
-T annotation;
-if (theObject instanceof Method) {
-annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
-} else {
-annotation = theObject.getAnnotation(theHookClass);
-}
-return Optional.ofNullable(annotation);
-}
-
-private static int determineOrder(Class<?> theInterceptorClass) {
-int typeOrder = Interceptor.DEFAULT_ORDER;
-Optional<Interceptor> typeOrderAnnotation = findAnnotation(theInterceptorClass, Interceptor.class);
-if (typeOrderAnnotation.isPresent()) {
-typeOrder = typeOrderAnnotation.get().order();
-}
-return typeOrder;
-}
-
-private static String toErrorString(List<String> theParameterTypes) {
-return theParameterTypes
-.stream()
-.sorted()
-.collect(Collectors.joining(","));
-}
-
-protected abstract static class BaseInvoker implements Comparable<BaseInvoker> {
-
-private final int myOrder;
-private final Object myInterceptor;
-
-BaseInvoker(Object theInterceptor, int theOrder) {
-myInterceptor = theInterceptor;
-myOrder = theOrder;
-}
-
-public Object getInterceptor() {
-return myInterceptor;
-}
-
-abstract Object invoke(HookParams theParams);
-
-@Override
-public int compareTo(BaseInvoker theInvoker) {
-return myOrder - theInvoker.myOrder;
-}
-}
-
-private static class HookInvoker extends BaseInvoker {
+private class HookInvoker extends BaseInvoker {
 
 private final Method myMethod;
 private final Class<?>[] myParameterTypes;
 private final int[] myParameterIndexes;
-private final IPointcut myPointcut;
+private final POINTCUT myPointcut;
 
 /**
 * Constructor

@@ -579,7 +491,7 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 .toString();
 }
 
-public IPointcut getPointcut() {
+public POINTCUT getPointcut() {
 return myPointcut;
 }
 

@@ -624,17 +536,17 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 }
 
-protected static class HookDescriptor {
+protected class HookDescriptor {
 
-private final IPointcut myPointcut;
+private final POINTCUT myPointcut;
 private final int myOrder;
 
-public HookDescriptor(IPointcut thePointcut, int theOrder) {
+public HookDescriptor(POINTCUT thePointcut, int theOrder) {
 myPointcut = thePointcut;
 myOrder = theOrder;
 }
 
-IPointcut getPointcut() {
+POINTCUT getPointcut() {
 return myPointcut;
 }
 
@@ -644,4 +556,49 @@ public abstract class BaseInterceptorService<POINTCUT extends IPointcut> impleme
 
 }
 
+protected abstract static class BaseInvoker implements Comparable<BaseInvoker> {
+
+private final int myOrder;
+private final Object myInterceptor;
+
+BaseInvoker(Object theInterceptor, int theOrder) {
+myInterceptor = theInterceptor;
+myOrder = theOrder;
+}
+
+public Object getInterceptor() {
+return myInterceptor;
+}
+
+abstract Object invoke(HookParams theParams);
+
+@Override
+public int compareTo(BaseInvoker theInvoker) {
+return myOrder - theInvoker.myOrder;
+}
+}
+
+protected static <T extends Annotation> Optional<T> findAnnotation(AnnotatedElement theObject, Class<T> theHookClass) {
+T annotation;
+if (theObject instanceof Method) {
+annotation = MethodUtils.getAnnotation((Method) theObject, theHookClass, true, true);
+} else {
+annotation = theObject.getAnnotation(theHookClass);
+}
+return Optional.ofNullable(annotation);
+}
+
+private static int determineOrder(Class<?> theInterceptorClass) {
+return findAnnotation(theInterceptorClass, Interceptor.class)
+.map(Interceptor::order)
+.orElse(Interceptor.DEFAULT_ORDER);
+}
+
+private static String toErrorString(List<String> theParameterTypes) {
+return theParameterTypes
+.stream()
+.sorted()
+.collect(Collectors.joining(","));
+}
+
 }
 
@@ -48,7 +48,7 @@ public class InterceptorService extends BaseInterceptorService<Pointcut> impleme
 * @param theName The name for this registry (useful for troubleshooting)
 */
 public InterceptorService(String theName) {
-super(theName);
+super(Pointcut.class, theName);
 }
 
 @Override

@@ -72,7 +72,7 @@ public class InterceptorService extends BaseInterceptorService<Pointcut> impleme
 }
 
 
-private class AnonymousLambdaInvoker extends BaseInvoker {
+private static class AnonymousLambdaInvoker extends BaseInvoker {
 private final IAnonymousInterceptor myHook;
 private final Pointcut myPointcut;
 
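As an aside for consumers of this API, here is a minimal, hypothetical usage sketch of the refactored registry (not part of the commit), using the `Pointcut.TEST_RB` test pointcut that the tests further down exercise:

```java
// Hypothetical usage sketch - not part of this commit.
InterceptorService svc = new InterceptorService("example");

// Anonymous interceptors remain fully supported after the ThreadLocal removal
svc.registerAnonymousInterceptor(Pointcut.TEST_RB,
	(thePointcut, theArgs) -> System.out.println(theArgs.get(String.class, 0)));

// hasHooks() is now a cheap EnumSet membership test, so guarding callHooks()
// with it costs almost nothing on hot paths
if (svc.hasHooks(Pointcut.TEST_RB)) {
	svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
}
```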
@@ -142,7 +142,7 @@ public final class HapiSystemProperties {
 }
 
 /**
- * This property sets {@link DaoConfig#setStatusBasedReindexingDisabled(Boolean)} to true when the system starts up.
+ * This property sets {@link JpaStorageSettings#setStatusBasedReindexingDisabled(Boolean)} to true when the system starts up.
 */
 public static void enableStatusBasedReindex() {
 System.clearProperty(DISABLE_STATUS_BASED_REINDEX);
@@ -3,18 +3,16 @@ package ca.uhn.fhir.interceptor.executor;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.Hook;
 import ca.uhn.fhir.interceptor.api.HookParams;
+import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
+import ca.uhn.fhir.interceptor.api.IPointcut;
 import ca.uhn.fhir.interceptor.api.Interceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
 import ca.uhn.fhir.rest.server.exceptions.BaseServerResponseException;
 import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
 import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
-import ca.uhn.fhir.util.StopWatch;
 import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;

@@ -24,16 +22,14 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.contains;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertSame;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
-//import org.junit.jupiter.api.Disabled;
-
 public class InterceptorServiceTest {
 
 private static final Logger ourLog = LoggerFactory.getLogger(InterceptorServiceTest.class);
-private List<String> myInvocations = new ArrayList<>();
+private final List<String> myInvocations = new ArrayList<>();
 
 @Test
 public void testInterceptorWithAnnotationDefinedOnInterface() {
@@ -203,8 +199,11 @@ public class InterceptorServiceTest {
 // Registered in opposite order to verify that the order on the annotation is used
 MyTestInterceptorTwo interceptor1 = new MyTestInterceptorTwo();
 MyTestInterceptorOne interceptor0 = new MyTestInterceptorOne();
+assertFalse(svc.hasHooks(Pointcut.TEST_RB));
 svc.registerInterceptor(interceptor1);
+assertTrue(svc.hasHooks(Pointcut.TEST_RB));
 svc.registerInterceptor(interceptor0);
+assertTrue(svc.hasHooks(Pointcut.TEST_RB));
 
 // Register the manual interceptor (has Order right in the middle)
 MyTestInterceptorManual myInterceptorManual = new MyTestInterceptorManual();

@@ -236,6 +235,12 @@ public class InterceptorServiceTest {
 assertTrue(globalInterceptors.get(0) instanceof MyTestInterceptorOne, globalInterceptors.get(0).getClass().toString());
 assertTrue(globalInterceptors.get(1) instanceof MyTestInterceptorTwo, globalInterceptors.get(1).getClass().toString());
 
+// Unregister the two others
+assertTrue(svc.hasHooks(Pointcut.TEST_RB));
+svc.unregisterInterceptor(interceptor1);
+assertTrue(svc.hasHooks(Pointcut.TEST_RB));
+svc.unregisterInterceptor(interceptor0);
+assertFalse(svc.hasHooks(Pointcut.TEST_RB));
 }
 
 @Test

@@ -248,8 +253,10 @@ public class InterceptorServiceTest {
 svc.registerInterceptor(interceptor1);
 svc.registerInterceptor(interceptor0);
 
-boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
-assertTrue(outcome);
+if (svc.hasHooks(Pointcut.TEST_RB)) {
+boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
+assertTrue(outcome);
+}
 
 assertThat(myInvocations, contains("MyTestInterceptorOne.testRb", "MyTestInterceptorTwo.testRb"));
 assertSame("A", interceptor0.myLastString0);
@@ -257,6 +264,26 @@ public class InterceptorServiceTest {
 assertSame("B", interceptor1.myLastString1);
 }
 
+@Test
+public void testInvokeAnonymousInterceptorMethods() {
+InterceptorService svc = new InterceptorService();
+
+MyTestAnonymousInterceptorOne interceptor0 = new MyTestAnonymousInterceptorOne();
+MyTestAnonymousInterceptorTwo interceptor1 = new MyTestAnonymousInterceptorTwo();
+svc.registerAnonymousInterceptor(Pointcut.TEST_RB, interceptor0);
+svc.registerAnonymousInterceptor(Pointcut.TEST_RB, interceptor1);
+
+if (svc.hasHooks(Pointcut.TEST_RB)) {
+boolean outcome = svc.callHooks(Pointcut.TEST_RB, new HookParams("A", "B"));
+assertTrue(outcome);
+}
+
+assertThat(myInvocations, contains("MyTestAnonymousInterceptorOne.testRb", "MyTestAnonymousInterceptorTwo.testRb"));
+assertSame("A", interceptor0.myLastString0);
+assertSame("A", interceptor1.myLastString0);
+assertSame("B", interceptor1.myLastString1);
+}
+
 @Test
 public void testInvokeUsingSupplierArg() {
 InterceptorService svc = new InterceptorService();
@@ -320,8 +347,8 @@ public class InterceptorServiceTest {
 .add(String.class, null)
 .add(String.class, null);
 svc.callHooks(Pointcut.TEST_RB, params);
-assertEquals(null, interceptor.myValue0);
-assertEquals(null, interceptor.myValue1);
+assertNull(interceptor.myValue0);
+assertNull(interceptor.myValue1);
 svc.unregisterAllInterceptors();
 
 // First null

@@ -331,7 +358,7 @@ public class InterceptorServiceTest {
 .add(String.class, null)
 .add(String.class, "A");
 svc.callHooks(Pointcut.TEST_RB, params);
-assertEquals(null, interceptor.myValue0);
+assertNull(interceptor.myValue0);
 assertEquals("A", interceptor.myValue1);
 svc.unregisterAllInterceptors();
 

@@ -343,7 +370,7 @@ public class InterceptorServiceTest {
 .add(String.class, null);
 svc.callHooks(Pointcut.TEST_RB, params);
 assertEquals("A", interceptor.myValue0);
-assertEquals(null, interceptor.myValue1);
+assertNull(interceptor.myValue1);
 svc.unregisterAllInterceptors();
 
 }

@@ -399,9 +426,9 @@ public class InterceptorServiceTest {
 assertEquals("AAA", e.getMessage());
 }
 
-assertEquals(true, interceptor0.myHit);
-assertEquals(true, interceptor1.myHit);
-assertEquals(true, interceptor2.myHit);
+assertTrue(interceptor0.myHit);
+assertTrue(interceptor1.myHit);
+assertTrue(interceptor2.myHit);
 }
 
 
@@ -465,7 +492,7 @@ public class InterceptorServiceTest {
 }
 }
 
-@SuppressWarnings("unchecked")
+@SuppressWarnings({"unchecked", "rawtypes"})
 @Test
 public void testValidateParamTypesWrongParam() {
 InterceptorService svc = new InterceptorService();
@@ -485,110 +512,6 @@ public class InterceptorServiceTest {
 }
 }
 
-@Test
-public void testThreadLocalHookInterceptor() {
-InterceptorService svc = new InterceptorService();
-svc.setThreadlocalInvokersEnabled(true);
-
-HookParams params = new HookParams().add("A").add("B");
-
-@Interceptor(order = 100)
-class LocalInterceptor {
-
-private int myCount = 0;
-
-@Hook(Pointcut.TEST_RB)
-public boolean testRb(String theString0, String theString1) {
-myCount++;
-return true;
-}
-
-}
-LocalInterceptor interceptor = new LocalInterceptor();
-svc.registerThreadLocalInterceptor(interceptor);
-try {
-
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-assertEquals(5, interceptor.myCount);
-
-} finally {
-svc.unregisterThreadLocalInterceptor(interceptor);
-}
-
-// Call some more - The interceptor is removed so the count shouldn't change
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-assertEquals(5, interceptor.myCount);
-
-}
-
-/**
- * <pre>
- * JA 20190321 On my MBP 2018
- * ThreadLocalEnabled=true - Performed 500000 loops in 8383.0ms - 0.017ms / loop
- * ThreadLocalEnabled=false - Performed 500000 loops in 3743.0ms - 0.007ms / loop
- * ThreadLocalEnabled=true - Performed 500000 loops in 6163.0ms - 0.012ms / loop
- * ThreadLocalEnabled=false - Performed 500000 loops in 3487.0ms - 0.007ms / loop
- * ThreadLocalEnabled=true - Performed 1000000 loops in 00:00:12.458 - 0.012ms / loop
- * ThreadLocalEnabled=false - Performed 1000000 loops in 7046.0ms - 0.007ms / loop
- * </pre>
- */
-@Test
-@Disabled("Performance test - Not needed normally")
-public void testThreadLocalHookInterceptorMicroBenchmark() {
-threadLocalMicroBenchmark(true, 500000);
-threadLocalMicroBenchmark(false, 500000);
-threadLocalMicroBenchmark(true, 500000);
-threadLocalMicroBenchmark(false, 500000);
-threadLocalMicroBenchmark(true, 500000);
-threadLocalMicroBenchmark(false, 500000);
-}
-
-private void threadLocalMicroBenchmark(boolean theThreadlocalInvokersEnabled, int theCount) {
-InterceptorService svc = new InterceptorService();
-svc.setThreadlocalInvokersEnabled(theThreadlocalInvokersEnabled);
-
-HookParams params = new HookParams().add("A").add("B");
-
-@Interceptor(order = 100)
-class LocalInterceptor {
-
-private int myCount = 0;
-
-@Hook(Pointcut.TEST_RB)
-public void testRb(String theString0, String theString1) {
-myCount++;
-}
-
-}
-
-LocalInterceptor interceptor = new LocalInterceptor();
-StopWatch sw = new StopWatch();
-for (int i = 0; i < theCount; i++) {
-
-svc.registerThreadLocalInterceptor(interceptor);
-try {
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-svc.callHooks(Pointcut.TEST_RB, params);
-} finally {
-svc.unregisterThreadLocalInterceptor(interceptor);
-}
-
-}
-
-ourLog.info("ThreadLocalEnabled={} - Performed {} loops in {} - {} / loop - Outcomne: {}", theThreadlocalInvokersEnabled, theCount, sw.toString(), sw.formatMillisPerOperation(theCount), interceptor.myCount);
-}
-
 @BeforeEach
 public void before() {
 myInvocations.clear();
@@ -634,6 +557,27 @@ public class InterceptorServiceTest {
 }
 }
 
+public class MyTestAnonymousInterceptorOne implements IAnonymousInterceptor {
+private String myLastString0;
+@Override
+public void invoke(IPointcut thePointcut, HookParams theArgs) {
+myLastString0 = theArgs.get(String.class, 0);
+myInvocations.add("MyTestAnonymousInterceptorOne.testRb");
+}
+}
+
+public class MyTestAnonymousInterceptorTwo implements IAnonymousInterceptor {
+private String myLastString0;
+private String myLastString1;
+
+@Override
+public void invoke(IPointcut thePointcut, HookParams theArgs) {
+myLastString0 = theArgs.get(String.class, 0);
+myLastString1 = theArgs.get(String.class, 1);
+myInvocations.add("MyTestAnonymousInterceptorTwo.testRb");
+}
+}
+
 @Interceptor(order = 200)
 public class MyTestInterceptorManual {
 @Hook(Pointcut.TEST_RB)
@@ -662,12 +606,6 @@ public class InterceptorServiceTest {
 private static class CanonicalSubscription {
 }
 
-/**
- * Just a make-believe version of this class for the unit test
- */
-private static class ResourceDeliveryMessage {
-}
-
 @Interceptor()
 public static class InterceptorThatFailsOnRegister {
 
@@ -4,14 +4,14 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>
 
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
 

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 
@@ -21,7 +21,7 @@ package ca.uhn.fhir.cli;
 */
 
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.demo.ContextHolder;
 import ca.uhn.fhir.jpa.demo.FhirServerConfig;
 import ca.uhn.fhir.jpa.demo.FhirServerConfigDstu3;

@@ -77,7 +77,7 @@ public class RunServerCommand extends BaseCommand {
 
 addOptionalOption(options, "u", "url", "Url", "If this option is set, specifies the JDBC URL to use for the database connection");
 
-Long defaultReuseSearchResults = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
+Long defaultReuseSearchResults = JpaStorageSettings.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
 String defaultReuseSearchResultsStr = defaultReuseSearchResults == null ? "off" : String.valueOf(defaultReuseSearchResults);
 options.addOption(null, OPTION_REUSE_SEARCH_RESULTS_MILLIS, true, "The time in milliseconds within which the same results will be returned for multiple identical searches, or \"off\" (default is " + defaultReuseSearchResultsStr + ")");
 return options;
@@ -89,7 +89,7 @@ public class BulkImportCommandIT {
 
 private Batch2JobStartResponse createJobStartResponse(String theId) {
 Batch2JobStartResponse response = new Batch2JobStartResponse();
-response.setJobId(theId);
+response.setInstanceId(theId);
 return response;
 }
 
@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
 

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../../hapi-deployable-pom</relativePath>
 </parent>
 
@@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.demo;
 * #L%
 */
 
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect;
-import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
 import org.springframework.context.annotation.Bean;

@@ -39,20 +38,15 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 public class CommonConfig {
 
 /**
- * Configure FHIR properties around the the JPA server via this bean
+ * Configure FHIR properties around the JPA server via this bean
 */
 @Bean
-public DaoConfig daoConfig() {
-DaoConfig retVal = new DaoConfig();
+public JpaStorageSettings storageSettings() {
+JpaStorageSettings retVal = new JpaStorageSettings();
 retVal.setAllowMultipleDelete(true);
 return retVal;
 }
 
-@Bean
-public ModelConfig modelConfig() {
-return daoConfig().getModelConfig();
-}
-
 /**
 * The following bean configures the database connection. The 'url' property value of "jdbc:h2:file:target./jpaserver_h2_files" indicates that the server should save resources in a
 * directory called "jpaserver_h2_files".
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.demo;
 
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang3.Validate;
 

@@ -36,7 +36,7 @@ public class ContextHolder {
 private static String ourDatabaseUrl;
 
 static {
-ourReuseSearchResultsMillis = DaoConfig.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
+ourReuseSearchResultsMillis = JpaStorageSettings.DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS;
 }
 
 public static FhirContext getCtx() {
@@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.demo;
 */
 
 import ca.uhn.fhir.context.FhirContext;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.config.HapiJpaConfig;
 import ca.uhn.fhir.jpa.config.JpaDstu2Config;
 import ca.uhn.fhir.jpa.config.util.HapiEntityManagerFactoryUtil;

@@ -56,11 +56,11 @@ public class FhirServerConfig {
 private Properties myJpaProperties;
 
 /**
- * Configure FHIR properties around the the JPA server via this bean
+ * Configure FHIR properties around the JPA server via this bean
 */
 @Bean
-public DaoConfig daoConfig() {
-DaoConfig retVal = new DaoConfig();
+public JpaStorageSettings storageSettings() {
+JpaStorageSettings retVal = new JpaStorageSettings();
 retVal.setAllowMultipleDelete(true);
 return retVal;
 }
@@ -25,7 +25,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;

@@ -120,14 +120,14 @@ public class JpaServerDemo extends RestfulServer {
 if (fhirVersion == FhirVersionEnum.DSTU2) {
 IFhirSystemDao<Bundle, MetaDt> systemDao = myAppCtx.getBean("mySystemDaoDstu2", IFhirSystemDao.class);
 JpaConformanceProviderDstu2 confProvider = new JpaConformanceProviderDstu2(this, systemDao,
-myAppCtx.getBean(DaoConfig.class));
+myAppCtx.getBean(JpaStorageSettings.class));
 confProvider.setImplementationDescription("Example Server");
 setServerConformanceProvider(confProvider);
 } else if (fhirVersion == FhirVersionEnum.DSTU3) {
 IFhirSystemDao<org.hl7.fhir.dstu3.model.Bundle, org.hl7.fhir.dstu3.model.Meta> systemDao = myAppCtx
 .getBean("mySystemDaoDstu3", IFhirSystemDao.class);
 JpaConformanceProviderDstu3 confProvider = new JpaConformanceProviderDstu3(this, systemDao,
-myAppCtx.getBean(DaoConfig.class), myAppCtx.getBean(ISearchParamRegistry.class));
+myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class));
 confProvider.setImplementationDescription("Example Server");
 setServerConformanceProvider(confProvider);
 } else if (fhirVersion == FhirVersionEnum.R4) {

@@ -135,7 +135,7 @@ public class JpaServerDemo extends RestfulServer {
 .getBean("mySystemDaoR4", IFhirSystemDao.class);
 IValidationSupport validationSupport = myAppCtx.getBean(IValidationSupport.class);
 JpaCapabilityStatementProvider confProvider = new JpaCapabilityStatementProvider(this, systemDao,
-myAppCtx.getBean(DaoConfig.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport);
+myAppCtx.getBean(JpaStorageSettings.class), myAppCtx.getBean(ISearchParamRegistry.class), validationSupport);
 confProvider.setImplementationDescription("Example Server");
 setServerConformanceProvider(confProvider);
 } else {

@@ -168,11 +168,11 @@ public class JpaServerDemo extends RestfulServer {
 CorsInterceptor corsInterceptor = new CorsInterceptor();
 registerInterceptor(corsInterceptor);
 
-DaoConfig daoConfig = myAppCtx.getBean(DaoConfig.class);
-daoConfig.setAllowExternalReferences(ContextHolder.isAllowExternalRefs());
-daoConfig.setEnforceReferentialIntegrityOnDelete(!ContextHolder.isDisableReferentialIntegrity());
-daoConfig.setEnforceReferentialIntegrityOnWrite(!ContextHolder.isDisableReferentialIntegrity());
-daoConfig.setReuseCachedSearchResultsForMillis(ContextHolder.getReuseCachedSearchResultsForMillis());
+JpaStorageSettings storageSettings = myAppCtx.getBean(JpaStorageSettings.class);
+storageSettings.setAllowExternalReferences(ContextHolder.isAllowExternalRefs());
+storageSettings.setEnforceReferentialIntegrityOnDelete(!ContextHolder.isDisableReferentialIntegrity());
+storageSettings.setEnforceReferentialIntegrityOnWrite(!ContextHolder.isDisableReferentialIntegrity());
+storageSettings.setReuseCachedSearchResultsForMillis(ContextHolder.getReuseCachedSearchResultsForMillis());
 
 DaoRegistry daoRegistry = myAppCtx.getBean(DaoRegistry.class);
 IInterceptorBroadcaster interceptorBroadcaster = myAppCtx.getBean(IInterceptorBroadcaster.class);
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
 

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../pom.xml</relativePath>
 </parent>
 

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>6.5.1-SNAPSHOT</version>
+<version>6.5.2-SNAPSHOT</version>
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
 
@@ -0,0 +1,5 @@
+---
+type: perf
+issue: 4545
+title: "The InterceptorService now maintains an EnumSet of all registered interceptor Pointcuts,
+  which should improve performance when testing for the existence of specific pointcuts."
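The mechanism behind this entry is the `rebuildRegisteredPointcutSet()` method added above; as a standalone illustration of the same technique (all names here are hypothetical stand-ins, not HAPI FHIR API):

```java
import java.util.EnumSet;

// Hypothetical stand-ins illustrating the EnumSet technique described above.
enum MyPointcut { CREATED, UPDATED, DELETED }

class PointcutRegistry {
	// volatile: readers see a fully built set without taking the registry lock
	private volatile EnumSet<MyPointcut> myRegisteredPointcuts = EnumSet.noneOf(MyPointcut.class);

	// Called under the registry mutex whenever hooks are registered or removed
	void rebuildRegisteredPointcutSet(EnumSet<MyPointcut> theCurrentlyRegistered) {
		myRegisteredPointcuts = EnumSet.copyOf(theCurrentlyRegistered);
	}

	// O(1) bit test instead of consulting several multimaps on every event
	boolean hasHooks(MyPointcut thePointcut) {
		return myRegisteredPointcuts.contains(thePointcut);
	}
}
```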
@@ -0,0 +1,6 @@
+---
+type: change
+issue: 4545
+title: "The settings beans for the JPA server have been renamed to better reflect their purpose. Specifically
+  the `ModelConfig` bean has been renamed to `StorageSettings` and the `DaoConfig` bean has been
+  renamed to `JpaStorageSettings`."
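For downstream projects this rename is usually a mechanical change in Spring configuration; a sketch under that assumption (the class name `MyServerConfig` is illustrative):

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MyServerConfig {

	// Previously this bean would have been declared as "public DaoConfig daoConfig()"
	@Bean
	public JpaStorageSettings storageSettings() {
		JpaStorageSettings retVal = new JpaStorageSettings();
		retVal.setAllowMultipleDelete(true); // example setting, carried over unchanged
		return retVal;
	}
}
```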
@@ -0,0 +1,6 @@
+---
+type: remove
+issue: 4545
+title: "The InterceptorService no longer supports ThreadLocal interceptor registrations. This
+  feature was deprecated in 6.2.0 due to lack of use and has never been enabled by default. Please
+  let us know on the mailing list if this affects you."
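Code that depended on the removed feature can usually fall back to an ordinary registration scoped with try/finally, as the deleted Javadoc itself recommended; a sketch (`MyInterceptor` is hypothetical, and note the behavioural difference called out in the comment):

```java
// Hypothetical migration sketch. Unlike a ThreadLocal registration, this
// interceptor will also observe events fired from other threads while it
// is registered.
MyInterceptor interceptor = new MyInterceptor();
svc.registerInterceptor(interceptor);
try {
	// ... do the work that the interceptor should observe ...
} finally {
	svc.unregisterInterceptor(interceptor);
}
```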
@@ -20,12 +20,12 @@ Clients may sometimes post resources to your server that contain absolute resou
 
 By default, the server will reject this reference, as only local references are permitted by the server. This can be changed however.
 
-If you want the server to recognize that this URL is actually a local reference (i.e. because the server will be deployed to the base URL `http://example.com/fhir/`) you can configure the server to recognize this URL via the following DaoConfig setting:
+If you want the server to recognize that this URL is actually a local reference (i.e. because the server will be deployed to the base URL `http://example.com/fhir/`) you can configure the server to recognize this URL via the following JpaStorageSettings setting:
 
 ```java
 @Bean
-public DaoConfig daoConfig() {
-DaoConfig retVal = new DaoConfig();
+public JpaStorageSettings storageSettings() {
+JpaStorageSettings retVal = new JpaStorageSettings();
 // ... other config ...
 retVal.getTreatBaseUrlsAsLocal().add("http://example.com/fhir/");
 return retVal;

@@ -36,8 +36,8 @@ On the other hand, if you want the server to be configurable to allow remote ref
 
 ```java
 @Bean
-public DaoConfig daoConfig() {
-DaoConfig retVal = new DaoConfig();
+public JpaStorageSettings storageSettings() {
+JpaStorageSettings retVal = new JpaStorageSettings();
 // Allow external references
 retVal.setAllowExternalReferences(true);
 
@@ -59,19 +59,19 @@ etc. For example, you might refer to the ValueSet `http://hl7.org/fhir/ValueSet/
 resources. In this case, you are not necessarily telling the server that this is a real address that it should resolve,
 but rather that this is an identifier for a ValueSet where `ValueSet.url` has the given URI/URL.
 
-HAPI can be configured to treat certain URI/URL patterns as logical by using the DaoConfig#setTreatReferencesAsLogical
+HAPI can be configured to treat certain URI/URL patterns as logical by using the JpaStorageSettings#setTreatReferencesAsLogical
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setTreatReferencesAsLogical(java.util.Set)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setTreatReferencesAsLogical(java.util.Set)))
 .
 
 For example:
 
 ```java
 // Treat specific URL as logical
-myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
+myStorageSettings.getTreatReferencesAsLogical().add("http://mysystem.com/ValueSet/cats-and-dogs");
 
 // Treat all references with given prefix as logical
-myDaoConfig.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
+myStorageSettings.getTreatReferencesAsLogical().add("http://mysystem.com/mysystem-vs-*");
 ```
 
 ## Referential Integrity
 
@@ -88,8 +88,8 @@ Referential integrity can be configured on two levels: `write` and `delete`.
 #### JPA Server
 ```java
 @Bean
-public DaoConfig daoConfig() {
-DaoConfig retVal = new DaoConfig();
+public JpaStorageSettings storageSettings() {
+JpaStorageSettings retVal = new JpaStorageSettings();
 // ... other config ...
 retVal.setEnforceReferentialIntegrityOnWrite(true);
 retVal.setEnforceReferentialIntegrityOnDelete(true);
@@ -116,7 +116,7 @@ Under many normal scenarios this is a n acceptable performance tradeoff, but in
 You can change the global cache using the following setting:
 
 ```java
-myDaoConfig.setReuseCachedSearchResultsForMillis(null);
+myStorageSettings.setReuseCachedSearchResultsForMillis(null);
 ```
 
 ### Disable Cache at the Request Level
 
@@ -168,5 +168,5 @@ X-Retry-On-Version-Conflict: retry; max-retries=100
 
 Delete with expunge submits a job to delete and expunge the requested resources. This is done in batches. If the DELETE
 ?_expunge=true syntax is used to trigger the delete expunge, then the batch size will be determined by the value
-of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getExpungeBatchSize())
+of [Expunge Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#getExpungeBatchSize())
 property.
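If the default batch size needs tuning, the settings bean exposes a setter alongside the getter linked above; a sketch (the value shown is an arbitrary example, not a recommendation):

```java
// Sketch: tune the delete-expunge batch size on the renamed settings bean
myStorageSettings.setExpungeBatchSize(800);
```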
@@ -7,7 +7,7 @@ This is required to support the `_content`, or `_text` search parameters.
 
 Additional indexing is implemented for simple search parameters of type token, string, and reference.
 These implement the basic search, as well as several modifiers:
-This **experimental** feature is enabled via the `setAdvancedHSearchIndexing()` property of DaoConfig.
+This **experimental** feature is enabled via the `setAdvancedHSearchIndexing()` property of JpaStorageSettings.
 
 ## Search Parameter Support
 
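For completeness, enabling the property named above would look like this (a sketch, assuming the settings-bean idiom used on the other documentation pages in this commit):

```java
// Sketch: opt in to the experimental advanced Hibernate Search indexing
myStorageSettings.setAdvancedHSearchIndexing(true);
```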
@@ -103,7 +103,7 @@ search index. This allows some queries to return results without using the rela
 Note: This does not support the $meta-add or $meta-delete operations. Full reindexing is required
 when this option is enabled after resources have been indexed.
 
-This **experimental** feature is enabled via the `setStoreResourceInHSearchIndex()` option of DaoConfig.
+This **experimental** feature is enabled via the `setStoreResourceInHSearchIndex()` option of JpaStorageSettings.
 
 # Synchronous Writes
 
@@ -24,9 +24,9 @@ The grouping of Observation resources by `Observation.code` means that the `$las
 
 # Deployment and Configuration
 
-The `$lastn` operation is disabled by default. The operation can be enabled by setting the DaoConfig#setLastNEnabled
+The `$lastn` operation is disabled by default. The operation can be enabled by setting the JpaStorageSettings#setLastNEnabled
 property (
-see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setLastNEnabled(boolean)))
+see [JavaDoc](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setLastNEnabled(boolean)))
 .
 
 In addition, the Elasticsearch client service, `ElasticsearchSvcImpl` will need to be instantiated with parameters
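Enabling the property named above follows the same one-line idiom; a sketch:

```java
// Sketch: enable the $lastn operation on the storage settings bean
myStorageSettings.setLastNEnabled(true);
```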
@@ -6,7 +6,7 @@ This page contains information for performance optimization. If you are plannin
 
 The FHIR history operation allows clients to see a change history for a resource, across all resources of a given type, or even across all resources on a server. This operation includes a total count (in `Bundle.total`) that can be very expensive to calculate on large databases with many resources.
 
-As a result, a setting on the `DaoConfig` object has been added called **History Count Mode**. This setting has 3 possible options:
+As a result, a setting on the `JpaStorageSettings` object has been added called **History Count Mode**. This setting has 3 possible options:
 
 * COUNT_CACHED. This is the new default: A loading cache will be used for history counts without any dates specified, meaning that counts are stored in RAM for up to one minute, and the loading cache blocks all but one client thread per JVM from actually performing the count. This effectively throttles access to the database. History operation invocations that include a `_since` or `_to` parameter will never have a count included in the results.
 
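Selecting a mode is a one-line setting; a sketch, assuming the `setHistoryCountMode` setter and its companion `HistoryCountModeEnum` on the storage settings bean:

```java
// Sketch: keep the default cached counting behaviour explicit.
// HistoryCountModeEnum is assumed to be the companion enum to this setting.
myStorageSettings.setHistoryCountMode(HistoryCountModeEnum.COUNT_CACHED);
```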
@@ -255,11 +255,11 @@ an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE**
 visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.
 
 If the server has been configured with
-a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
-of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.IdStrategyEnum.html#UUID), or the
+a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.IdStrategyEnum))
+of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.IdStrategyEnum.html#UUID), or the
 server has been configured with
-a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum))
-of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/DaoConfig.ClientIdStrategyEnum.html#ANY)
+a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.ClientIdStrategyEnum))
+of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.ClientIdStrategyEnum.html#ANY)
 the server will create a Forced ID for all resources (not only resources having textual IDs).
 
 ## Columns
 
|
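A sketch of the two configurations described above, using the setter and enum names visible in the updated links (either one causes a Forced ID to be created for every resource):

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class IdStrategyExample {
	// Sketch only: illustrates both triggers for universal Forced IDs.
	public void configure(JpaStorageSettings theStorageSettings) {
		// Server assigns UUIDs instead of sequential numeric IDs...
		theStorageSettings.setResourceServerIdStrategy(JpaStorageSettings.IdStrategyEnum.UUID);
		// ...or clients may assign any ID, including purely numeric ones
		theStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.ANY);
	}
}
```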
@@ -41,7 +41,7 @@ As a result, in HAPI FHIR JPA 3.6.0, an efficient way of upgrading existing data
 In order to perform a migration using this functionality, the following steps should be followed:
 
 * Stop your running HAPI FHIR JPA instance (and remember to make a backup of your database before proceeding with any changes!)
-* Modify your `DaoConfig` to specify that hash-based searches should not be used, using the following setting: `myDaoConfig.setDisableHashBasedSearches(true);`
+* Modify your `JpaStorageSettings` to specify that hash-based searches should not be used, using the following setting: `myStorageSettings.setDisableHashBasedSearches(true);`
 * Make sure that you have your JPA settings configured to not automatically create database indexes and columns using the following setting in your JPA Properties: `extraProperties.put("hibernate.hbm2ddl.auto", "none");`
 * Run the database migrator command, including the entry `-x no-migrate-350-hashes` on the command line. For example:
 
@@ -60,7 +60,7 @@ SELECT * FROM HFJ_RES_REINDEX_JOB
 
 * When this query no longer returns any rows, the reindexing process is complete.
 * At this time, HAPI FHIR should be stopped once again in order to convert it to using the hash based indexes.
-* Modify your `DaoConfig` to specify that hash-based searches are used, using the following setting (this is the default setting, so it could also simply be omitted): `myDaoConfig.setDisableHashBasedSearches(false);`
+* Modify your `JpaStorageSettings` to specify that hash-based searches are used, using the following setting (this is the default setting, so it could also simply be omitted): `myStorageSettings.setDisableHashBasedSearches(false);`
 * Execute the migrator tool again, this time omitting the flag option, e.g.
 
 ```bash
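A sketch of the two-phase settings toggle used by the migration steps above, with the setter the docs name:

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class HashMigrationExample {
	// Sketch only: phase one of the migration runs with hash-based
	// searches disabled while the reindex job populates the hash columns.
	public void beforeReindexing(JpaStorageSettings theStorageSettings) {
		theStorageSettings.setDisableHashBasedSearches(true);
	}

	// Phase two restores the default once reindexing has completed.
	public void afterReindexing(JpaStorageSettings theStorageSettings) {
		theStorageSettings.setDisableHashBasedSearches(false); // default
	}
}
```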
@@ -33,8 +33,8 @@ One important caveat is that chaining is currently not supported when using this
 ## Enabling MDM Expansion
 
 On top of needing to instantiate an MDM module, you must enable this feature in
-the [DaoConfig](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html) bean, using
-the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setAllowMdmExpansion(boolean))
+the [StorageSettings](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html) bean, using
+the [Allow MDM Expansion](/hapi-fhir/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/entity/StorageSettings.html#setAllowMdmExpansion(boolean))
 property.
 
 <div class="helpWarningCalloutBox">
 
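A sketch of enabling the feature on the `StorageSettings` bean referenced in the updated links:

```java
import ca.uhn.fhir.jpa.model.entity.StorageSettings;

public class MdmExpansionExample {
	// Sketch only: turns on MDM expansion, per the docs above.
	public void configure(StorageSettings theStorageSettings) {
		theStorageSettings.setAllowMdmExpansion(true);
	}
}
```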
@@ -708,7 +708,7 @@ This operation takes two optional Parameters.
         <td>0..1</td>
         <td>
             The number of links that should be deleted at a time. If omitted, then the batch size will be determined by the value
-            of [Reindex Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#getReindexBatchSize())
+            of [Reindex Batch Size](/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/StorageConfig.html#getReindexBatchSize())
             property.
         </td>
     </tr>
@@ -57,7 +57,7 @@ This fact can have security implications:
   in use in another partition.
 
 * In a server using the default configuration of
-  SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/DaoConfig.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.DaoConfig.IdStrategyEnum))
+  SEQUENTIAL_NUMERIC [Server ID Strategy](/hapi-fhir/apidocs/hapi-fhir-storage/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.IdStrategyEnum))
   a client may be able to infer the IDs of resources in other partitions based on the ID they were assigned.
 
 These considerations can be addressed by using UUID Server ID Strategy, and disallowing client-assigned IDs.
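A sketch of the mitigation recommended above: UUID server IDs plus no client-assigned IDs, so assigned IDs leak nothing across partitions (setter and enum names as used elsewhere in this commit):

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class PartitionIdSafetyExample {
	// Sketch only: hardens ID assignment on a partitioned server.
	public void configure(JpaStorageSettings theStorageSettings) {
		theStorageSettings.setResourceServerIdStrategy(JpaStorageSettings.IdStrategyEnum.UUID);
		theStorageSettings.setResourceClientIdStrategy(JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED);
	}
}
```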
@@ -137,7 +137,7 @@ If you wish to update a historical version of a resource without creating a new
 Update operation. While this operation is not supported by the FHIR specification, it's an enhancement added
 specifically to HAPI-FHIR.
 
-In order to use this new functionality, you must set the `setUpdateWithHistoryRewriteEnabled` setting in the `DaoConfig`
+In order to use this new functionality, you must set the `setUpdateWithHistoryRewriteEnabled` setting in the `StorageSettings`
 to true.
 
 The following API request shows an example of executing a PUT at the following endpoint.
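A sketch of enabling the history-rewrite enhancement described above. The setter name comes from the docs; the request header named in the comment is an assumption and should be verified against the server documentation:

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class HistoryRewriteExample {
	// Sketch only: the PUT must then target a specific version, e.g.
	// PUT [base]/Patient/123/_history/2, and (assumption) carry an
	// X-Rewrite-History: true header to trigger the rewrite path.
	public void configure(JpaStorageSettings theStorageSettings) {
		theStorageSettings.setUpdateWithHistoryRewriteEnabled(true);
	}
}
```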
@@ -11,7 +11,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>6.5.1-SNAPSHOT</version>
+        <version>6.5.2-SNAPSHOT</version>
         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>
 
@@ -4,7 +4,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>6.5.1-SNAPSHOT</version>
+        <version>6.5.2-SNAPSHOT</version>
         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>
 
@@ -12,6 +12,7 @@ import org.junit.jupiter.params.provider.MethodSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.net.ssl.SSLException;
 import javax.net.ssl.SSLHandshakeException;
 import javax.ws.rs.client.Client;
 import javax.ws.rs.core.Response;
@@ -23,6 +24,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.core.IsNot.not;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
 /**
@@ -99,7 +101,7 @@ public class JaxRsRestfulClientFactoryTest extends BaseFhirVersionParameterizedT
 				.get(Response.class);
 			fail();
 		} catch (Exception e) {
-			assertEquals(SSLHandshakeException.class, e.getCause().getClass());
+			assertTrue(e.getCause() instanceof SSLException);
 		}
 	}
 }
 
@@ -5,7 +5,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>6.5.1-SNAPSHOT</version>
+        <version>6.5.2-SNAPSHOT</version>
         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -5,7 +5,7 @@
     <parent>
         <groupId>ca.uhn.hapi.fhir</groupId>
         <artifactId>hapi-deployable-pom</artifactId>
-        <version>6.5.1-SNAPSHOT</version>
+        <version>6.5.2-SNAPSHOT</version>
         <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
     </parent>
 
@@ -21,9 +21,8 @@ package ca.uhn.fhir.jpa.binstore;
  */
 
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
 import ca.uhn.fhir.jpa.binary.api.StoredDetails;
+import ca.uhn.fhir.jpa.binary.svc.BaseBinaryStorageSvcImpl;
 import ca.uhn.fhir.jpa.dao.data.IBinaryStorageEntityDao;
 import ca.uhn.fhir.jpa.model.entity.BinaryStorageEntity;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
@@ -35,14 +34,12 @@ import org.hibernate.LobHelper;
 import org.hibernate.Session;
 import org.hl7.fhir.instance.model.api.IIdType;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
 
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
 import javax.persistence.PersistenceContextType;
-
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -58,10 +55,6 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
 	private EntityManager myEntityManager;
 	@Autowired
 	private IBinaryStorageEntityDao myBinaryStorageEntityDao;
-	@Autowired
-	private PlatformTransactionManager myPlatformTransactionManager;
-	@Autowired
-	private DaoConfig myDaoConfig;
 
 	@Override
 	@Transactional(propagation = Propagation.REQUIRED)
 
@@ -21,12 +21,9 @@ package ca.uhn.fhir.jpa.bulk.export.svc;
 */
 
 import ca.uhn.fhir.batch2.api.IJobPersistence;
 import ca.uhn.fhir.batch2.jobs.export.BulkExportAppCtx;
 import ca.uhn.fhir.batch2.jobs.export.models.BulkExportBinaryFileId;
 import ca.uhn.fhir.batch2.model.JobInstance;
 import ca.uhn.fhir.batch2.model.StatusEnum;
 import ca.uhn.fhir.batch2.model.WorkChunk;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
@@ -38,7 +35,6 @@ import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.util.Batch2JobDefinitionConstants;
 import ca.uhn.fhir.util.JsonUtil;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hl7.fhir.instance.model.api.IBaseBinary;
@@ -58,7 +54,6 @@ import javax.annotation.PostConstruct;
 import java.time.LocalDateTime;
 import java.time.ZoneId;
 import java.util.Date;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -71,14 +66,12 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
 	private final DaoRegistry myDaoRegistry;
 
 	private final PlatformTransactionManager myTxManager;
-
-	private final DaoConfig myDaoConfig;
-	private final BulkExportHelperService myBulkExportHelperSvc;
-
-	private final IJobPersistence myJpaJobPersistence;
+	private final JpaStorageSettings myDaoConfig;
+	private final BulkExportHelperService myBulkExportHelperSvc;
+	private final IJobPersistence myJpaJobPersistence;
+	private TransactionTemplate myTxTemplate;
 
-	public BulkDataExportJobSchedulingHelperImpl(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, DaoConfig theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence, TransactionTemplate theTxTemplate) {
+	public BulkDataExportJobSchedulingHelperImpl(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence, TransactionTemplate theTxTemplate) {
 		myDaoRegistry = theDaoRegistry;
 		myTxManager = theTxManager;
 		myDaoConfig = theDaoConfig;
 
@@ -26,7 +26,7 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.fhirpath.IFhirPath;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap;
@@ -95,7 +95,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
 	private BulkExportHelperService myBulkExportHelperSvc;
 
 	@Autowired
-	private DaoConfig myDaoConfig;
+	private JpaStorageSettings myStorageSettings;
 
 	@Autowired
 	private DaoRegistry myDaoRegistry;
@@ -140,7 +140,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
 	private LinkedHashSet<JpaPid> getPidsForPatientStyleExport(ExportPIDIteratorParameters theParams, String resourceType, String jobId, RuntimeResourceDefinition def) {
 		LinkedHashSet<JpaPid> pids = new LinkedHashSet<>();
 		// Patient
-		if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
+		if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) {
 			String errorMessage = "You attempted to start a Patient Bulk Export, but the system has `Index Missing Fields` disabled. It must be enabled for Patient Bulk Export";
 			ourLog.error(errorMessage);
 			throw new IllegalStateException(Msg.code(797) + errorMessage);
 
@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.svc;
 import ca.uhn.fhir.batch2.api.IJobCoordinator;
 import ca.uhn.fhir.batch2.importpull.models.Batch2BulkImportPullJobParameters;
 import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.util.Logs;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.model.ActivateJobResult;
@@ -79,7 +79,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
 	private IJobCoordinator myJobCoordinator;
 
 	@Autowired
-	private DaoConfig myDaoConfig;
+	private JpaStorageSettings myStorageSettings;
 
 	@PostConstruct
 	public void start() {
@@ -163,7 +163,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
 	@Transactional(propagation = Propagation.NEVER)
 	@Override
 	public ActivateJobResult activateNextReadyJob() {
-		if (!myDaoConfig.isEnableTaskBulkImportJobExecution()) {
+		if (!myStorageSettings.isEnableTaskBulkImportJobExecution()) {
 			Logs.getBatchTroubleshootingLog().trace("Bulk import job execution is not enabled on this server. No action taken.");
 			return new ActivateJobResult(false, null);
 		}
@@ -295,7 +295,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc, IHasScheduledJ
 
 		ourLog.info("Submitting bulk import with bijob id {} to job scheduler", biJobId);
 
-		return myJobCoordinator.startInstance(request).getJobId();
+		return myJobCoordinator.startInstance(request).getInstanceId();
 	}
 
 	private void addFilesToJob(@Nonnull List<BulkImportJobFileJson> theInitialFiles, BulkImportJobEntity job, int nextSequence) {
 
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.config;
 * #L%
 */
 
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import ca.uhn.fhir.jpa.api.svc.IDeleteExpungeSvc;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
@@ -48,7 +48,7 @@ public class Batch2SupportConfig {
 	}
 
 	@Bean
-	DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, DaoConfig theDaoConfig, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
-		return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theDaoConfig, theIdHelper, theResourceLinkDao);
+	DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
+		return new DeleteExpungeSqlBuilder(theResourceTableFKProvider, theStorageSettings, theIdHelper, theResourceLinkDao);
 	}
 }
 
@@ -8,7 +8,7 @@ import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.interceptor.executor.InterceptorService;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
@@ -214,7 +214,7 @@ public class JpaConfig {
 	private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";
 
 	@Autowired
-	public DaoConfig myDaoConfig;
+	public JpaStorageSettings myStorageSettings;
 
 	@Bean("myDaoRegistry")
 	public DaoRegistry daoRegistry() {
@@ -291,16 +291,16 @@ public class JpaConfig {
 
 	@Bean(name = "myBinaryStorageInterceptor")
 	@Lazy
-	public BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> binaryStorageInterceptor(DaoConfig theDaoConfig, FhirContext theCtx) {
+	public BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> binaryStorageInterceptor(JpaStorageSettings theStorageSettings, FhirContext theCtx) {
 		BinaryStorageInterceptor<? extends IPrimitiveDatatype<byte[]>> interceptor = new BinaryStorageInterceptor<>(theCtx);
-		interceptor.setAllowAutoInflateBinaries(theDaoConfig.isAllowAutoInflateBinaries());
-		interceptor.setAutoInflateBinariesMaximumSize(theDaoConfig.getAutoInflateBinariesMaximumBytes());
+		interceptor.setAllowAutoInflateBinaries(theStorageSettings.isAllowAutoInflateBinaries());
+		interceptor.setAutoInflateBinariesMaximumSize(theStorageSettings.getAutoInflateBinariesMaximumBytes());
 		return interceptor;
 	}
 
 	@Bean
-	public MemoryCacheService memoryCacheService(DaoConfig theDaoConfig) {
-		return new MemoryCacheService(theDaoConfig);
+	public MemoryCacheService memoryCacheService(JpaStorageSettings theStorageSettings) {
+		return new MemoryCacheService(theStorageSettings);
 	}
 
 	@Bean
@@ -454,8 +454,8 @@ public class JpaConfig {
 	}
 
 	@Bean
-	public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, DaoConfig theDaoConfig, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence) {
-		return new BulkDataExportJobSchedulingHelperImpl(theDaoRegistry, theTxManager, theDaoConfig, theBulkExportHelperSvc, theJpaJobPersistence, null);
+	public IBulkDataExportJobSchedulingHelper bulkDataExportJobSchedulingHelper(DaoRegistry theDaoRegistry, PlatformTransactionManager theTxManager, JpaStorageSettings theStorageSettings, BulkExportHelperService theBulkExportHelperSvc, IJobPersistence theJpaJobPersistence) {
+		return new BulkDataExportJobSchedulingHelperImpl(theDaoRegistry, theTxManager, theStorageSettings, theBulkExportHelperSvc, theJpaJobPersistence, null);
 	}
 
 	@Bean
@@ -662,7 +662,7 @@ public class JpaConfig {
 
 	@Bean
 	public SearchStrategyFactory searchStrategyFactory(@Autowired(required = false) IFulltextSearchSvc theFulltextSvc) {
-		return new SearchStrategyFactory(myDaoConfig, theFulltextSvc);
+		return new SearchStrategyFactory(myStorageSettings, theFulltextSvc);
 	}
 
 	@Bean
@@ -759,8 +759,8 @@ public class JpaConfig {
 	}
 
 	@Bean
-	public SearchParameterDaoValidator searchParameterDaoValidator(FhirContext theFhirContext, DaoConfig theDaoConfig, ISearchParamRegistry theSearchParamRegistry) {
-		return new SearchParameterDaoValidator(theFhirContext, theDaoConfig, theSearchParamRegistry);
+	public SearchParameterDaoValidator searchParameterDaoValidator(FhirContext theFhirContext, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) {
+		return new SearchParameterDaoValidator(theFhirContext, theStorageSettings, theSearchParamRegistry);
 	}
 
 	@Bean
@@ -774,7 +774,6 @@ public class JpaConfig {
 	}
 
-
 
 	@Bean
 	public ITermReindexingSvc termReindexingSvc() {
 		return new TermReindexingSvcImpl();
@@ -786,10 +785,12 @@ public class JpaConfig {
 	}
 
 	@Bean
-	public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao(){
+	public IMdmLinkDao<JpaPid, MdmLink> mdmLinkDao() {
 		return new MdmLinkDaoJpaImpl();
 	}
 
 	@Bean
-	IMdmLinkImplFactory<MdmLink> mdmLinkImplFactory() {return new JpaMdmLinkImplFactory();}
+	IMdmLinkImplFactory<MdmLink> mdmLinkImplFactory() {
+		return new JpaMdmLinkImplFactory();
+	}
 }
 
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.config;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
@@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
-import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.search.ExceptionService;
 import ca.uhn.fhir.jpa.search.ISynchronousSearchSvc;
@@ -63,7 +62,7 @@ public class SearchConfig {
 	public static final String CONTINUE_TASK = "continueTask";
 
 	@Autowired
-	private DaoConfig myDaoConfig;
+	private JpaStorageSettings myStorageSettings;
 	@Autowired
 	private HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
 	@Autowired
@@ -71,8 +70,6 @@ public class SearchConfig {
 	@Autowired
 	private HibernatePropertiesProvider myDialectProvider;
-	@Autowired
-	private ModelConfig myModelConfig;
 	@Autowired
 	private ISearchParamRegistry mySearchParamRegistry;
 	@Autowired
 	private PartitionSettings myPartitionSettings;
@@ -115,7 +112,7 @@ public class SearchConfig {
 	public ISearchCoordinatorSvc searchCoordinatorSvc() {
 		return new SearchCoordinatorSvcImpl(
 			myContext,
-			myDaoConfig,
+			myStorageSettings,
 			myInterceptorBroadcaster,
 			myHapiTransactionService,
 			mySearchCacheSvc,
@@ -139,14 +136,13 @@ public class SearchConfig {
 
 	@Bean(name = ISearchBuilder.SEARCH_BUILDER_BEAN_NAME)
 	@Scope("prototype")
-	public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType, DaoConfig theDaoConfig) {
+	public ISearchBuilder newSearchBuilder(IDao theDao, String theResourceName, Class<? extends IBaseResource> theResourceType) {
 		return new SearchBuilder(theDao,
 			theResourceName,
-			myDaoConfig,
+			myStorageSettings,
 			myEntityManagerFactory,
 			mySqlBuilderFactory,
 			myDialectProvider,
-			myModelConfig,
 			mySearchParamRegistry,
 			myPartitionSettings,
 			myInterceptorBroadcaster,
@@ -168,7 +164,7 @@ public class SearchConfig {
 			myInterceptorBroadcaster,
 			mySearchBuilderFactory,
 			mySearchResultCacheSvc,
-			myDaoConfig,
+			myStorageSettings,
 			mySearchCacheSvc,
 			myPagingProvider
 		);
@@ -184,7 +180,7 @@ public class SearchConfig {
 			myInterceptorBroadcaster,
 			mySearchBuilderFactory,
 			mySearchResultCacheSvc,
-			myDaoConfig,
+			myStorageSettings,
 			mySearchCacheSvc,
 			myPagingProvider,
 			exceptionService() // singleton
 
@@ -12,7 +12,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
 import ca.uhn.fhir.jpa.api.dao.IJpaDao;
@@ -223,8 +223,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	@Autowired
 	ExpungeService myExpungeService;
-	@Autowired
-	private DaoConfig myConfig;
 	@Autowired
 	private ISearchParamPresenceSvc mySearchParamPresenceSvc;
 	@Autowired
 	private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor;
@@ -358,8 +356,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	@Override
-	public DaoConfig getConfig() {
-		return myConfig;
+	public JpaStorageSettings getStorageSettings() {
+		return myStorageSettings;
 	}
 
 	@Override
@@ -386,7 +384,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		TagDefinition retVal = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.TAG_DEFINITION, key);
 
 		if (retVal == null) {
-			HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, () -> new HashMap<>());
+			HashMap<MemoryCacheService.TagDefinitionCacheKey, TagDefinition> resolvedTagDefinitions = theTransactionDetails.getOrCreateUserData(HapiTransactionService.XACT_USERDATA_KEY_RESOLVED_TAG_DEFINITIONS, HashMap::new);
 			retVal = resolvedTagDefinitions.get(key);
 
 			if (retVal == null) {
@@ -526,11 +524,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	public boolean isLogicalReference(IIdType theId) {
-		return LogicalReferenceHelper.isLogicalReference(myConfig.getModelConfig(), theId);
+		return LogicalReferenceHelper.isLogicalReference(myStorageSettings, theId);
 	}
 
 	/**
-	 * Returns true if the resource has changed (either the contents or the tags)
+	 * Returns {@literal true} if the resource has changed (either the contents or the tags)
 	 */
 	protected EncodedResource populateResourceIntoEntity(TransactionDetails theTransactionDetails, RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, boolean thePerformIndexing) {
 		if (theEntity.getResourceType() == null) {
@@ -546,7 +544,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 		if (thePerformIndexing) {
 
-			encoding = myConfig.getResourceEncoding();
+			encoding = myStorageSettings.getResourceEncoding();
 
 			String resourceType = theEntity.getResourceType();
 
@@ -560,7 +558,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			HashFunction sha256 = Hashing.sha256();
 			HashCode hashCode;
 			String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
-			if (getConfig().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getConfig().getInlineResourceTextBelowSize()) {
+			if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
 				resourceText = encodedResource;
 				resourceBinary = null;
 				encoding = ResourceEncodingEnum.JSON;
@@ -592,8 +590,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 		}
 
-		boolean skipUpdatingTags = myConfig.isMassIngestionMode() && theEntity.isHasTags();
-		skipUpdatingTags |= myConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
+		boolean skipUpdatingTags = myStorageSettings.isMassIngestionMode() && theEntity.isHasTags();
+		skipUpdatingTags |= myStorageSettings.getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE;
 
 		if (!skipUpdatingTags) {
 			changed |= updateTags(theTransactionDetails, theRequest, theResource, theEntity);
@@ -615,7 +613,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		if (thePerformIndexing && !changed) {
 			if (theEntity.getId() == null) {
 				changed = true;
-			} else if (myConfig.isMassIngestionMode()) {
+			} else if (myStorageSettings.isMassIngestionMode()) {
 
 				// Don't check existing - We'll rely on the SHA256 hash only
 
@@ -709,7 +707,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		}
 
 		theExcludeElements.add("id");
-		boolean inlineTagMode = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE;
+		boolean inlineTagMode = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE;
 		if (hasExtensions || inlineTagMode) {
 			if (!inlineTagMode) {
 				theExcludeElements.add(theResourceType + ".meta.profile");
@@ -1029,7 +1027,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 		}
 
-		if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myConfig.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
+		if (thePerformIndexing && changed != null && !changed.isChanged() && !theForceUpdate && myStorageSettings.isSuppressUpdatesWithNoChange() && (entity.getVersion() > 1 || theUpdateVersion)) {
 			ourLog.debug("Resource {} has not changed", entity.getIdDt().toUnqualified().getValue());
 			if (theResource != null) {
 				myJpaStorageResourceParser.updateResourceMetadata(entity, theResource);
@@ -1152,7 +1150,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		ResourceTable entity = (ResourceTable) theEntity;
 
 		IBaseResource oldResource;
-		if (getConfig().isMassIngestionMode()) {
+		if (getStorageSettings().isMassIngestionMode()) {
 			oldResource = null;
 		} else {
 			oldResource = myJpaStorageResourceParser.toResource(entity, false);
@@ -1183,7 +1181,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		historyEntity.setDeleted(null);
 
 		// Check if resource is the same
-		ResourceEncodingEnum encoding = myConfig.getResourceEncoding();
+		ResourceEncodingEnum encoding = myStorageSettings.getResourceEncoding();
 		List<String> excludeElements = new ArrayList<>(8);
 		getExcludedElements(historyEntity.getResourceType(), excludeElements, theResource.getMeta());
 		String encodedResourceString = encodeResource(theResource, encoding, excludeElements, myContext);
@@ -1192,13 +1190,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 
 		historyEntity.setUpdated(theTransactionDetails.getTransactionDate());
 
-		if (!changed && myConfig.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
+		if (!changed && myStorageSettings.isSuppressUpdatesWithNoChange() && (historyEntity.getVersion() > 1)) {
 			ourLog.debug("Resource {} has not changed", historyEntity.getIdDt().toUnqualified().getValue());
 			myJpaStorageResourceParser.updateResourceMetadata(historyEntity, theResource);
 			return historyEntity;
 		}
 
-		if (getConfig().getInlineResourceTextBelowSize() > 0 && encodedResourceString.length() < getConfig().getInlineResourceTextBelowSize()) {
+		if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResourceString.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
 			populateEncodedResource(encodedResource, encodedResourceString, null, ResourceEncodingEnum.JSON);
 		} else {
 			populateEncodedResource(encodedResource, null, resourceBinary, encoding);
@@ -1260,7 +1258,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	private void createHistoryEntry(RequestDetails theRequest, IBaseResource theResource, ResourceTable theEntity, EncodedResource theChanged) {
-		boolean versionedTags = getConfig().getTagStorageMode() == DaoConfig.TagStorageModeEnum.VERSIONED;
+		boolean versionedTags = getStorageSettings().getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.VERSIONED;
 		final ResourceHistoryTable historyEntry = theEntity.toHistory(versionedTags);
 		historyEntry.setEncoding(theChanged.getEncoding());
 		historyEntry.setResource(theChanged.getResourceBinary());
@@ -1293,8 +1291,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		String requestId = getRequestId(theRequest, source);
 		source = cleanProvenanceSourceUri(source);
 
-		boolean haveSource = isNotBlank(source) && myConfig.getStoreMetaSourceInformation().isStoreSourceUri();
-		boolean haveRequestId = isNotBlank(requestId) && myConfig.getStoreMetaSourceInformation().isStoreRequestId();
+		boolean haveSource = isNotBlank(source) && myStorageSettings.getStoreMetaSourceInformation().isStoreSourceUri();
+		boolean haveRequestId = isNotBlank(requestId) && myStorageSettings.getStoreMetaSourceInformation().isStoreRequestId();
 		if (haveSource || haveRequestId) {
 			ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
 			provenance.setResourceHistoryTable(historyEntry);
@@ -1311,7 +1309,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	private String getRequestId(RequestDetails theRequest, String theSource) {
-		if (myConfig.isPreserveRequestIdInResourceBody()) {
+		if (myStorageSettings.isPreserveRequestIdInResourceBody()) {
 			return StringUtils.substringAfter(theSource, "#");
 		}
 		return theRequest != null ? theRequest.getRequestId() : null;
@@ -1466,7 +1464,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				continue;
 			}
 
-			if (getConfig().isEnforceReferenceTargetTypes()) {
+			if (getStorageSettings().isEnforceReferenceTargetTypes()) {
 				for (IBase nextChild : values) {
 					IBaseReference nextRef = (IBaseReference) nextChild;
 					IIdType referencedId = nextRef.getReferenceElement();
@@ -1487,9 +1485,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	protected void validateMetaCount(int theMetaCount) {
-		if (myConfig.getResourceMetaCountHardLimit() != null) {
-			if (theMetaCount > myConfig.getResourceMetaCountHardLimit()) {
-				throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + " meta entries (tag/profile/security label), maximum is " + myConfig.getResourceMetaCountHardLimit());
+		if (myStorageSettings.getResourceMetaCountHardLimit() != null) {
+			if (theMetaCount > myStorageSettings.getResourceMetaCountHardLimit()) {
+				throw new UnprocessableEntityException(Msg.code(932) + "Resource contains " + theMetaCount + " meta entries (tag/profile/security label), maximum is " + myStorageSettings.getResourceMetaCountHardLimit());
 			}
 		}
 	}
@@ -1529,7 +1527,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			throw new UnprocessableEntityException(Msg.code(933) + "Resource contains the 'subsetted' tag, and must not be stored as it may contain a subset of available data");
 		}
 
-		if (getConfig().isEnforceReferenceTargetTypes()) {
+		if (getStorageSettings().isEnforceReferenceTargetTypes()) {
 			String resName = getContext().getResourceType(theResource);
 			validateChildReferenceTargetTypes(theResource, resName);
 		}
@@ -1544,8 +1542,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}
 
 	@VisibleForTesting
-	public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
-		myConfig = theDaoConfig;
+	public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
+		myStorageSettings = theStorageSettings;
 	}
 
 	public void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity, ResourceIndexedSearchParams theNewParams) {
@@ -1555,7 +1553,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		} else {
 			theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource));
 			theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
-			if (myDaoConfig.isAdvancedHSearchIndexing()) {
+			if (myStorageSettings.isAdvancedHSearchIndexing()) {
 				ExtendedHSearchIndexData hSearchIndexData = myFulltextSearchSvc.extractLuceneIndexData(theResource, theNewParams);
 				theEntity.setLuceneIndexData(hSearchIndexData);
 			}
 
@@ -32,7 +32,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
@@ -268,7 +268,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			}
 		}
 
-		if (getConfig().getResourceServerIdStrategy() == DaoConfig.IdStrategyEnum.UUID) {
+		if (getStorageSettings().getResourceServerIdStrategy() == JpaStorageSettings.IdStrategyEnum.UUID) {
 			theResource.setId(UUID.randomUUID().toString());
 			theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE);
 		}
@@ -376,11 +376,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 				boolean createForPureNumericIds = true;
 				createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
 			} else {
-				boolean createForPureNumericIds = getConfig().getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ALPHANUMERIC;
+				boolean createForPureNumericIds = getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC;
 				createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
 			}
 		} else {
-			switch (getConfig().getResourceClientIdStrategy()) {
+			switch (getStorageSettings().getResourceClientIdStrategy()) {
 				case NOT_ALLOWED:
 				case ALPHANUMERIC:
 					break;
@@ -475,15 +475,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}
 
 	void validateResourceIdCreation(T theResource, RequestDetails theRequest) {
-		DaoConfig.ClientIdStrategyEnum strategy = getConfig().getResourceClientIdStrategy();
+		JpaStorageSettings.ClientIdStrategyEnum strategy = getStorageSettings().getResourceClientIdStrategy();
 
-		if (strategy == DaoConfig.ClientIdStrategyEnum.NOT_ALLOWED) {
+		if (strategy == JpaStorageSettings.ClientIdStrategyEnum.NOT_ALLOWED) {
 			if (!isSystemRequest(theRequest)) {
 				throw new ResourceNotFoundException(Msg.code(959) + getMessageSanitized("failedToCreateWithClientAssignedIdNotAllowed", theResource.getIdElement().getIdPart()));
 			}
 		}
 
-		if (strategy == DaoConfig.ClientIdStrategyEnum.ALPHANUMERIC) {
+		if (strategy == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
 			if (theResource.getIdElement().isIdPartValidLong()) {
 				throw new InvalidRequestException(Msg.code(960) + getMessageSanitized("failedToCreateWithClientAssignedNumericId", theResource.getIdElement().getIdPart()));
 			}
@@ -648,7 +648,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		Set<JpaPid> resourceIds = myMatchResourceUrlService.search(paramMap, myResourceType, theRequest, null);
 
 		if (resourceIds.size() > 1) {
-			if (!getConfig().isAllowMultipleDelete()) {
+			if (!getStorageSettings().isAllowMultipleDelete()) {
 				throw new PreconditionFailedException(Msg.code(962) + getContext().getLocalizer().getMessageSanitized(BaseStorageDao.class, "transactionOperationWithMultipleMatchFailure", "DELETE", theUrl, resourceIds.size()));
 			}
 		}
@@ -720,7 +720,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}
 
 	private void validateDeleteEnabled() {
-		if (!getConfig().isDeleteEnabled()) {
+		if (!getStorageSettings().isDeleteEnabled()) {
 			String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "deleteBlockedBecauseDisabled");
 			throw new PreconditionFailedException(Msg.code(966) + msg);
 		}
@@ -834,7 +834,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}
 
 	private void validateExpungeEnabled() {
-		if (!getConfig().isExpungeEnabled()) {
+		if (!getStorageSettings().isExpungeEnabled()) {
 			throw new MethodNotAllowedException(Msg.code(968) + "$expunge is not enabled on this server");
 		}
 	}
@@ -953,7 +953,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			return;
 		}
 
-		if (getConfig().isMarkResourcesForReindexingUponSearchParameterChange()) {
+		if (getStorageSettings().isMarkResourcesForReindexingUponSearchParameterChange()) {
 
 			ReindexJobParameters params = new ReindexJobParameters();
 
@@ -1041,7 +1041,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		}
 
 		ResourceTable latestVersion = readEntityLatestVersion(theResourceId, theRequest, transactionDetails);
-		boolean nonVersionedTags = myDaoConfig.getTagStorageMode() != DaoConfig.TagStorageModeEnum.VERSIONED;
+		boolean nonVersionedTags = myStorageSettings.getTagStorageMode() != JpaStorageSettings.TagStorageModeEnum.VERSIONED;
 		if (latestVersion.getVersion() != entity.getVersion() || nonVersionedTags) {
 			doMetaDelete(theMetaDel, entity, theRequest, transactionDetails);
 		} else {
@@ -1092,7 +1092,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	@PostConstruct
 	@Override
 	public void start() {
-		assert getConfig() != null;
+		assert getStorageSettings() != null;
 
 		RuntimeResourceDefinition def = getContext().getResourceDefinition(myResourceType);
 		myResourceName = def.getName();
@@ -1425,7 +1425,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (theParams.getSearchContainedMode() == SearchContainedModeEnum.BOTH) {
 			throw new MethodNotAllowedException(Msg.code(983) + "Contained mode 'both' is not currently supported");
 		}
-		if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE && !myModelConfig.isIndexOnContainedResources()) {
+		if (theParams.getSearchContainedMode() != SearchContainedModeEnum.FALSE && !myStorageSettings.isIndexOnContainedResources()) {
 			throw new MethodNotAllowedException(Msg.code(984) + "Searching with _contained mode enabled is not enabled on this server");
 		}
 
@@ -1480,10 +1480,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		if (theRequest != null) {
 
 			if (theRequest.isSubRequest()) {
-				Integer max = getConfig().getMaximumSearchResultCountInTransaction();
+				Integer max = getStorageSettings().getMaximumSearchResultCountInTransaction();
 				if (max != null) {
 					Validate.inclusiveBetween(1, Integer.MAX_VALUE, max, "Maximum search result count in transaction ust be a positive integer");
-					theParams.setLoadSynchronousUpTo(getConfig().getMaximumSearchResultCountInTransaction());
+					theParams.setLoadSynchronousUpTo(getStorageSettings().getMaximumSearchResultCountInTransaction());
 				}
 			}
 
@@ -1517,9 +1517,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		return myTransactionService.execute(theRequest, transactionDetails, tx -> {
 
 			if (theParams.getLoadSynchronousUpTo() != null) {
-				theParams.setLoadSynchronousUpTo(Math.min(getConfig().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
+				theParams.setLoadSynchronousUpTo(Math.min(getStorageSettings().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
 			} else {
-				theParams.setLoadSynchronousUpTo(getConfig().getInternalSynchronousSearchSize());
+				theParams.setLoadSynchronousUpTo(getStorageSettings().getInternalSynchronousSearchSize());
 			}
 
 			ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
@@ -1628,7 +1628,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		Runnable onRollback = () -> theResource.getIdElement().setValue(id);
 
 		// Execute the update in a retryable transaction
-		if (myDaoConfig.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) {
+		if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) {
 			return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails), onRollback);
 		} else {
 			return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails), onRollback);
@@ -1770,7 +1770,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		// Validate that there are no resources pointing to the candidate that
 		// would prevent deletion
 		DeleteConflictList deleteConflicts = new DeleteConflictList();
-		if (getConfig().isEnforceReferentialIntegrityOnDelete()) {
+		if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
 			myDeleteConflictService.validateOkToDelete(deleteConflicts, entity, true, theRequest, new TransactionDetails());
 		}
 		DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
@@ -1839,7 +1839,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 
 	private void validateGivenIdIsAppropriateToRetrieveResource(IIdType theId, BaseHasResource entity) {
 		if (entity.getForcedId() != null) {
-			if (getConfig().getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
+			if (getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
 				if (theId.isIdPartValidLong()) {
 					// This means that the resource with the given numeric ID exists, but it has a "forced ID", meaning that
 					// as far as the outside world is concerned, the given ID doesn't exist (it's just an internal pointer
 
@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
@@ -102,11 +102,11 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 	}
 
 	private void validateExpungeEnabled(ExpungeOptions theExpungeOptions) {
-		if (!getConfig().isExpungeEnabled()) {
+		if (!getStorageSettings().isExpungeEnabled()) {
 			throw new MethodNotAllowedException(Msg.code(2080) + "$expunge is not enabled on this server");
 		}
 
-		if (theExpungeOptions.isExpungeEverything() && !getConfig().isAllowMultipleDelete()) {
+		if (theExpungeOptions.isExpungeEverything() && !getStorageSettings().isAllowMultipleDelete()) {
 			throw new MethodNotAllowedException(Msg.code(2081) + "Multiple delete is not enabled on this server");
 		}
 	}
@@ -214,7 +214,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 		}
 
 		entityIds = loadedResourceTableEntries.stream().map(t->t.getId()).collect(Collectors.toList());
-		if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
+		if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
 			preFetchIndexes(entityIds, "searchParamPresence", "mySearchParamPresents", null);
 		}
 
@@ -280,8 +280,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 	}
 
 	@Override
-	protected DaoConfig getConfig() {
-		return myDaoConfig;
+	protected JpaStorageSettings getStorageSettings() {
+		return myStorageSettings;
 	}
 
 	@Override
@@ -290,8 +290,8 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 	}
 
 	@VisibleForTesting
-	public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
-		myDaoConfig = theDaoConfig;
+	public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
+		myStorageSettings = theStorageSettings;
 	}
 
 }
 
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.dao;
 
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
 import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder;
 import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchIndexExtractor;
@@ -31,7 +31,6 @@ import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder;
 import ca.uhn.fhir.jpa.dao.search.IHSearchSortHelper;
 import ca.uhn.fhir.jpa.dao.search.LastNOperation;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.search.ExtendedHSearchIndexData;
 import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
@@ -84,7 +83,11 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FulltextSearchSvcImpl.class);
 	private static final int DEFAULT_MAX_NON_PAGED_SIZE = 500;
 
+	final private ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder();
+	@Autowired
+	ISearchParamExtractor mySearchParamExtractor;
+	@Autowired
+	IIdHelperService myIdHelperService;
 	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
 	private EntityManager myEntityManager;
 	@Autowired
@@ -94,20 +97,9 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 	@Autowired
 	private ISearchParamRegistry mySearchParamRegistry;
 	@Autowired
-	private DaoConfig myDaoConfig;
-	@Autowired
-	ISearchParamExtractor mySearchParamExtractor;
-	@Autowired
-	IIdHelperService myIdHelperService;
-
-	@Autowired
-	ModelConfig myModelConfig;
-
+	private JpaStorageSettings myStorageSettings;
 	@Autowired
 	private IHSearchSortHelper myExtendedFulltextSortHelper;
 
-	final private ExtendedHSearchSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedHSearchSearchBuilder();
-
 	@Autowired(required = false)
 	private IHSearchEventListener myHSearchEventListener;
 
@@ -120,12 +112,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 		super();
 	}
 
 	@Override
 	public ExtendedHSearchIndexData extractLuceneIndexData(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
 		String resourceType = myFhirContext.getResourceType(theResource);
 		ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(resourceType);
 		ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
-			myDaoConfig, myFhirContext, activeSearchParams, mySearchParamExtractor, myModelConfig);
-		return extractor.extract(theResource,theNewParams);
+			myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
+		return extractor.extract(theResource, theNewParams);
 	}
 
 	@Override
@@ -134,7 +127,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 		// keep this in sync with the guts of doSearch
 		boolean requiresHibernateSearchAccess = myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT) || myParams.isLastN();
 
-		requiresHibernateSearchAccess |= myDaoConfig.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
+		requiresHibernateSearchAccess |= myStorageSettings.isAdvancedHSearchIndexing() && myAdvancedIndexQueryBuilder.isSupportsSomeOf(myParams);
 
 		return requiresHibernateSearchAccess;
 	}
@@ -186,10 +179,10 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 
 
 	private SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> getSearchQueryOptionsStep(
-		String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
+			String theResourceType, SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
 
 		dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
-		var query= getSearchSession().search(ResourceTable.class)
+		var query = getSearchSession().search(ResourceTable.class)
 			// The document id is the PK which is pid. We use this instead of _myId to avoid fetching the doc body.
 			.select(
 				// adapt the String docRef.id() to the Long that it really is.
@@ -203,7 +196,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 
 		if (theParams.getSort() != null) {
 			query.sort(
-				f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType) );
+				f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType));
 
 			// indicate parameter was processed
 			theParams.setSort(null);
@@ -216,7 +209,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 	private PredicateFinalStep buildWhereClause(SearchPredicateFactory f, String theResourceType,
 												SearchParameterMap theParams, IResourcePersistentId theReferencingPid) {
 		return f.bool(b -> {
-			ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myModelConfig, b, f);
+			ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);
 
 			/*
 			 * Handle _content parameter (resource body content)
@@ -249,7 +242,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 			/*
 			 * Handle other supported parameters
 			 */
-			if (myDaoConfig.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
+			if (myStorageSettings.isAdvancedHSearchIndexing() && theParams.getEverythingMode() == null) {
 				myAdvancedIndexQueryBuilder.addAndConsumeAdvancedQueryClauses(builder, theResourceType, theParams, mySearchParamRegistry);
 			}
 			//DROP EARLY HERE IF BOOL IS EMPTY?
@@ -332,7 +325,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 		validateHibernateSearchIsEnabled();
 		ensureElastic();
 
-		ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, myModelConfig, getSearchSession());
+		ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, myStorageSettings, getSearchSession());
 
 		dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
 		return autocomplete.search(theOptions);
@@ -340,13 +333,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 
 	/**
 	 * Throws an error if configured with Lucene.
-	 *
+	 * <p>
 	 * Some features only work with Elasticsearch.
 	 * Lastn and the autocomplete search use nested aggregations which are Elasticsearch-only
 	 */
 	private void ensureElastic() {
 		try {
-			getSearchSession().scope( ResourceTable.class )
+			getSearchSession().scope(ResourceTable.class)
 				.aggregation()
 				.extension(ElasticsearchExtension.get());
 		} catch (SearchException e) {
@@ -360,7 +353,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
 	public List<IResourcePersistentId> lastN(SearchParameterMap theParams, Integer theMaximumResults) {
|
||||
ensureElastic();
|
||||
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
|
||||
List<Long> pidList = new LastNOperation(getSearchSession(), myFhirContext, myModelConfig, mySearchParamRegistry)
|
||||
List<Long> pidList = new LastNOperation(getSearchSession(), myFhirContext, myStorageSettings, mySearchParamRegistry)
|
||||
.executeLastN(theParams, theMaximumResults);
|
||||
return convertLongsToResourcePersistentIds(pidList);
|
||||
}
|
||||
|
@ -384,7 +377,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
|
|||
// order resource projections as per thePids
|
||||
ArrayList<Long> pidList = new ArrayList<>(thePids);
|
||||
List<ExtendedHSearchResourceProjection> orderedAsPidsResourceDataList = rawResourceDataList.stream()
|
||||
.sorted( Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid) ).collect( Collectors.toList() );
|
||||
.sorted(Ordering.explicit(pidList).onResultOf(ExtendedHSearchResourceProjection::getPid)).collect(Collectors.toList());
|
||||
|
||||
return resourceProjectionsToResources(orderedAsPidsResourceDataList);
|
||||
}
|
||||
|
@ -400,7 +393,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
|
|||
|
||||
|
||||
private CompositeProjectionOptionsStep<?, ExtendedHSearchResourceProjection> buildResourceSelectClause(
|
||||
SearchProjectionFactory<EntityReference, ResourceTable> f) {
|
||||
SearchProjectionFactory<EntityReference, ResourceTable> f) {
|
||||
return f.composite(
|
||||
ExtendedHSearchResourceProjection::new,
|
||||
f.field("myId", Long.class),
|
||||
|
@ -433,12 +426,12 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
|
|||
dispatchEvent(IHSearchEventListener.HSearchEventType.SEARCH);
|
||||
|
||||
var query = getSearchSession().search(ResourceTable.class)
|
||||
.select(this::buildResourceSelectClause)
|
||||
.where(f -> buildWhereClause(f, theResourceType, theParams, null));
|
||||
.select(this::buildResourceSelectClause)
|
||||
.where(f -> buildWhereClause(f, theResourceType, theParams, null));
|
||||
|
||||
if (theParams.getSort() != null) {
|
||||
query.sort(
|
||||
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType) );
|
||||
f -> myExtendedFulltextSortHelper.getSortClauses(f, theParams.getSort(), theResourceType));
|
||||
}
|
||||
|
||||
List<ExtendedHSearchResourceProjection> extendedLuceneResourceProjections = query.fetchHits(offset, limit);
|
||||
|
|
|
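Taken together, the hunks above collapse the old DaoConfig/ModelConfig pair into a single JpaStorageSettings dependency. A minimal sketch of the updated extractor call site, using only names that appear in this diff (the surrounding injection is assumed):

    // Sketch: after this commit only one settings object flows into the extractor.
    ExtendedHSearchIndexExtractor extractor = new ExtendedHSearchIndexExtractor(
        myStorageSettings, myFhirContext, activeSearchParams, mySearchParamExtractor);
    ExtendedHSearchIndexData indexData = extractor.extract(theResource, theNewParams);
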
@ -99,7 +99,7 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion,
theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);

if (getConfig().isLastNEnabled()) {
if (getStorageSettings().isLastNEnabled()) {
if (!retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
// Update indexes here for LastN operation.
@ -95,7 +95,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi

if (isAutocompleteExtension) {
// this is a funky extension for NIH. Do our own thing and return.
ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myDaoConfig, theContext, theFilter, theCount, theId, theUrl, theValueSet);
ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myStorageSettings, theContext, theFilter, theCount, theId, theUrl, theValueSet);
if (myFulltextSearch == null || myFulltextSearch.isDisabled()) {
throw new InvalidRequestException(Msg.code(2083) + " Autocomplete is not supported on this server, as the fulltext search service is not configured.");
} else {

@ -119,7 +119,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
throw new InvalidRequestException(Msg.code(1134) + "$expand must EITHER be invoked at the instance level, or have a url specified, or have a ValueSet specified. Can not combine these options.");
}

ValueSetExpansionOptions options = createValueSetExpansionOptions(myDaoConfig, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage);
ValueSetExpansionOptions options = createValueSetExpansionOptions(myStorageSettings, theOffset, theCount, theIncludeHierarchy, theFilter, theDisplayLanguage);

IValidationSupport.ValueSetExpansionOutcome outcome;
if (haveId) {

@ -235,7 +235,7 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
ResourceTable retVal = super.updateEntity(theRequestDetails, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);

if (getConfig().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
myTerminologySvc.storeTermValueSet(retVal, valueSet);
@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;

@ -59,7 +59,6 @@ import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

@ -82,7 +81,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@Autowired
private FhirContext myContext;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired

@ -115,7 +114,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
default:
if (history.isHasTags()) {

@ -158,7 +157,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
resourceBytes = history.getResource();
resourceEncoding = history.getEncoding();
resourceText = history.getResourceTextVc();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (resource.isHasTags()) {

@ -183,7 +182,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
switch (myDaoConfig.getTagStorageMode()) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.dao;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;

@ -52,7 +52,7 @@ public class ObservationLastNIndexPersistSvc {
private IElasticsearchSvc myElasticsearchSvc;

@Autowired
private DaoConfig myConfig;
private JpaStorageSettings myConfig;

@Autowired
private FhirContext myContext;
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;

@ -92,7 +92,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {
@Autowired
private PartitionSettings myPartitionSettings;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myFhirContext;
@Autowired

@ -169,7 +169,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {
for (JpaPid next : outcome) {
foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
idsToPreFetch.add(next.getId());
}
}
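ClientIdStrategyEnum moved with its owning class, so callers update both the field type and the enum qualifier. A hedged before/after sketch, reusing only names that appear in this hunk:

    // Before (DaoConfig era):
    //   myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY
    // After this commit:
    if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY
            || !next.getAssociatedResourceId().isIdPartValidLong()) {
        idsToPreFetch.add(next.getId());
    }
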
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;

@ -120,7 +120,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
@Autowired
private ISearchParamPresentDao mySearchParamPresentDao;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private MemoryCacheService myMemoryCacheService;
@Autowired

@ -316,7 +316,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
if (resource == null || resource.isParamsComboTokensNonUniquePresent()) {
myResourceIndexedComboTokensNonUniqueDao.deleteByResourceId(theResourceLongId);
}
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
mySearchParamPresentDao.deleteByResourceId(theResourceLongId);
}
if (resource == null || resource.isHasLinks()) {
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;

@ -48,7 +48,6 @@ import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@ -109,7 +108,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
@Autowired
protected IResourceTableDao myResourceTableDao;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myFhirCtx;
@Autowired

@ -214,7 +213,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
} else {
// is a forced id
// we must resolve!
if (myDaoConfig.isDeleteEnabled()) {
if (myStorageSettings.isDeleteEnabled()) {
retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted).getPersistentId();
retVals.put(id, retVal);
} else {

@ -267,14 +266,14 @@ public class IdHelperService implements IIdHelperService<JpaPid> {

/**
* Returns true if the given resource ID should be stored in a forced ID. Under default config
* (meaning client ID strategy is {@link ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum#ALPHANUMERIC})
* (meaning client ID strategy is {@link JpaStorageSettings.ClientIdStrategyEnum#ALPHANUMERIC})
* this will return true if the ID has any non-digit characters.
* <p>
* In {@link ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum#ANY} mode it will always return true.
* In {@link JpaStorageSettings.ClientIdStrategyEnum#ANY} mode it will always return true.
*/
@Override
public boolean idRequiresForcedId(String theId) {
return myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId);
return myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY || !isValidPid(theId);
}

@Nonnull

@ -319,7 +318,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
if (!theIds.isEmpty()) {
Set<IIdType> idsToCheck = new HashSet<>(theIds.size());
for (IIdType nextId : theIds) {
if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
if (nextId.isIdPartValidLong()) {
if (!theOnlyForcedIds) {
JpaPid jpaPid = JpaPid.fromId(nextId.getIdPartAsLong());

@ -445,7 +444,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
ListMultimap<String, String> typeToIds = MultimapBuilder.hashKeys().arrayListValues().build();
for (IIdType nextId : theIds) {
if (myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(nextId)) {
if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ANY || !isValidPid(nextId)) {
if (nextId.hasResourceType()) {
typeToIds.put(nextId.getResourceType(), nextId.getIdPart());
} else {

@ -468,7 +467,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
Map<String, List<IResourceLookup<JpaPid>>> retVal = new HashMap<>();
RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);

if (myDaoConfig.getResourceClientIdStrategy() != DaoConfig.ClientIdStrategyEnum.ANY) {
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
List<Long> pids = theId
.stream()
.filter(t -> isValidPid(t))

@ -485,7 +484,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
String nextResourceType = nextEntry.getKey();
Collection<String> nextIds = nextEntry.getValue();

if (!myDaoConfig.isDeleteEnabled()) {
if (!myStorageSettings.isDeleteEnabled()) {
for (Iterator<String> forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) {
String nextForcedId = forcedIdIterator.next();
String nextKey = nextResourceType + "/" + nextForcedId;

@ -528,7 +527,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
}
retVal.get(forcedId).add(lookup);

if (!myDaoConfig.isDeleteEnabled()) {
if (!myStorageSettings.isDeleteEnabled()) {
String key = resourceType + "/" + forcedId;
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, key, lookup);
}

@ -555,7 +554,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
}

private void resolvePids(@Nonnull RequestPartitionId theRequestPartitionId, List<Long> thePidsToResolve, Map<String, List<IResourceLookup<JpaPid>>> theTargets) {
if (!myDaoConfig.isDeleteEnabled()) {
if (!myStorageSettings.isDeleteEnabled()) {
for (Iterator<Long> forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) {
Long nextPid = forcedIdIterator.next();
String nextKey = Long.toString(nextPid);

@ -592,7 +591,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
theTargets.put(id, new ArrayList<>());
}
theTargets.get(id).add(t);
if (!myDaoConfig.isDeleteEnabled()) {
if (!myStorageSettings.isDeleteEnabled()) {
String nextKey = t.getPersistentId().toString();
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, t);
}

@ -657,7 +656,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty());
}

if (!myDaoConfig.isDeleteEnabled()) {
if (!myStorageSettings.isDeleteEnabled()) {
JpaResourceLookup lookup = new JpaResourceLookup(theResourceType, theJpaPid.getId(), theDeletedAt);
String nextKey = theJpaPid.toString();
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup);
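The reworded idRequiresForcedId javadoc is easiest to check against concrete inputs; the expectations below follow directly from the method body shown above, assuming isValidPid accepts only all-digit IDs:

    // ALPHANUMERIC strategy (the default): only non-numeric IDs need a forced ID.
    //   idRequiresForcedId("123")   -> false  (valid PID, stored as-is)
    //   idRequiresForcedId("abc-1") -> true   (non-digit characters present)
    // ANY strategy: every client-assigned ID needs a forced ID, numeric or not.
    //   idRequiresForcedId("123")   -> true
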
@ -25,7 +25,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;

@ -77,7 +77,7 @@ public class SearchParamWithInlineReferencesExtractor {
@Autowired
private MatchResourceUrlService<JpaPid> myMatchResourceUrlService;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myContext;
@Autowired

@ -120,8 +120,8 @@ public class SearchParamWithInlineReferencesExtractor {
mySearchParamExtractorService.extractFromResource(theRequestPartitionId, theRequest, theParams, theExistingParams, theEntity, theResource, theTransactionDetails, theFailOnInvalidReference);

ResourceSearchParams activeSearchParams = mySearchParamRegistry.getActiveSearchParams(theEntity.getResourceType());
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.ENABLED) {
theParams.findMissingSearchParams(myPartitionSettings, myDaoConfig.getModelConfig(), theEntity, activeSearchParams);
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.ENABLED) {
theParams.findMissingSearchParams(myPartitionSettings, myStorageSettings, theEntity, activeSearchParams);
}

/*

@ -181,8 +181,8 @@ public class SearchParamWithInlineReferencesExtractor {
}

@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}

@VisibleForTesting

@ -203,11 +203,11 @@ public class SearchParamWithInlineReferencesExtractor {
* Handle references within the resource that are match URLs, for example references like "Patient?identifier=foo".
* These match URLs are resolved and replaced with the ID of the
* matching resource.
*
* <p>
* This method is *only* called from UPDATE path
*/
public void extractInlineReferences(ExtractInlineReferenceParams theParams) {
if (!myDaoConfig.isAllowInlineMatchUrlReferences()) {
if (!myStorageSettings.isAllowInlineMatchUrlReferences()) {
return;
}
IBaseResource resource = theParams.getResource();

@ -283,7 +283,7 @@ public class SearchParamWithInlineReferencesExtractor {
/*
* String Uniques
*/
if (myDaoConfig.isUniqueIndexesEnabled()) {
if (myStorageSettings.isUniqueIndexesEnabled()) {
for (ResourceIndexedComboStringUnique next : myDaoSearchParamSynchronizer.subtract(theExistingParams.myComboStringUniques, theParams.myComboStringUniques)) {
ourLog.debug("Removing unique index: {}", next);
myEntityManager.remove(next);

@ -291,7 +291,7 @@ public class SearchParamWithInlineReferencesExtractor {
}
boolean haveNewStringUniqueParams = false;
for (ResourceIndexedComboStringUnique next : myDaoSearchParamSynchronizer.subtract(theParams.myComboStringUniques, theExistingParams.myComboStringUniques)) {
if (myDaoConfig.isUniqueIndexesCheckedBeforeSave()) {
if (myStorageSettings.isUniqueIndexesCheckedBeforeSave()) {
ResourceIndexedComboStringUnique existing = myResourceIndexedCompositeStringUniqueDao.findByQueryString(next.getIndexString());
if (existing != null) {
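Tests that previously stubbed this extractor via setDaoConfig migrate one-for-one to the renamed @VisibleForTesting setter; a minimal sketch (the surrounding test fixture is assumed):

    SearchParamWithInlineReferencesExtractor extractor = new SearchParamWithInlineReferencesExtractor();
    extractor.setStorageSettings(new JpaStorageSettings()); // was setDaoConfig(new DaoConfig())
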
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers;

@ -97,15 +97,15 @@ public class ExtendedHSearchClauseBuilder {

final FhirContext myFhirContext;
public final BooleanPredicateClausesStep<?> myRootClause;
public final ModelConfig myModelConfig;
public final StorageSettings myStorageSettings;
final PathContext myRootContext;

final List<TemporalPrecisionEnum> ordinalSearchPrecisions = Arrays.asList(TemporalPrecisionEnum.YEAR, TemporalPrecisionEnum.MONTH, TemporalPrecisionEnum.DAY);

public ExtendedHSearchClauseBuilder(FhirContext myFhirContext, ModelConfig theModelConfig,
public ExtendedHSearchClauseBuilder(FhirContext myFhirContext, StorageSettings theStorageSettings,
BooleanPredicateClausesStep<?> theRootClause, SearchPredicateFactory thePredicateFactory) {
this.myFhirContext = myFhirContext;
this.myModelConfig = theModelConfig;
this.myStorageSettings = theStorageSettings;
this.myRootClause = theRootClause;
myRootContext = PathContext.buildRootContext(theRootClause, thePredicateFactory);
}

@ -534,7 +534,7 @@ public class ExtendedHSearchClauseBuilder {
ParamPrefixEnum activePrefix = qtyParam.getPrefix() == null ? ParamPrefixEnum.EQUAL : qtyParam.getPrefix();
String quantityElement = joinPath(thePathContext.getContextPath(), INDEX_TYPE_QUANTITY);

if (myModelConfig.getNormalizedQuantitySearchLevel() == NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED) {
if (myStorageSettings.getNormalizedQuantitySearchLevel() == NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED) {
QuantityParam canonicalQty = UcumServiceUtil.toCanonicalQuantityOrNull(qtyParam);
if (canonicalQty != null) {
String valueFieldPath = joinPath(quantityElement, QTY_VALUE_NORM);
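Note that ExtendedHSearchClauseBuilder now takes the model-layer StorageSettings rather than the JPA-layer JpaStorageSettings; the call sites earlier in this commit pass a JpaStorageSettings field straight through, which implies the JPA settings class extends the model one. A hedged construction sketch:

    // Inside a Hibernate Search predicate lambda; b and f come from the query DSL,
    // as in buildWhereClause above.
    ExtendedHSearchClauseBuilder builder =
        new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);
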
@ -22,8 +22,7 @@ package ca.uhn.fhir.jpa.dao.search;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;

@ -59,32 +58,30 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
*/
public class ExtendedHSearchIndexExtractor {

private final DaoConfig myDaoConfig;
private final JpaStorageSettings myJpaStorageSettings;
private final FhirContext myContext;
private final ResourceSearchParams myParams;
private final ISearchParamExtractor mySearchParamExtractor;
private final ModelConfig myModelConfig;

public ExtendedHSearchIndexExtractor(DaoConfig theDaoConfig, FhirContext theContext, ResourceSearchParams theActiveParams,
ISearchParamExtractor theSearchParamExtractor, ModelConfig theModelConfig) {
myDaoConfig = theDaoConfig;
public ExtendedHSearchIndexExtractor(JpaStorageSettings theJpaStorageSettings, FhirContext theContext, ResourceSearchParams theActiveParams,
ISearchParamExtractor theSearchParamExtractor) {
myJpaStorageSettings = theJpaStorageSettings;
myContext = theContext;
myParams = theActiveParams;
mySearchParamExtractor = theSearchParamExtractor;
myModelConfig = theModelConfig;
}

@Nonnull
public ExtendedHSearchIndexData extract(IBaseResource theResource, ResourceIndexedSearchParams theNewParams) {
ExtendedHSearchIndexData retVal = new ExtendedHSearchIndexData(myContext, myModelConfig, theResource);
ExtendedHSearchIndexData retVal = new ExtendedHSearchIndexData(myContext, myJpaStorageSettings, theResource);

if(myDaoConfig.isStoreResourceInHSearchIndex()) {
if (myJpaStorageSettings.isStoreResourceInHSearchIndex()) {
retVal.setRawResourceData(myContext.newJsonParser().encodeResourceToString(theResource));
}

retVal.setForcedId(theResource.getIdElement().getIdPart());

// todo add a flag ot DaoConfig to suppress this
// todo add a flag ot StorageSettings to suppress this
extractAutocompleteTokens(theResource, retVal);

theNewParams.myStringParams.stream()

@ -127,8 +124,8 @@ public class ExtendedHSearchIndexExtractor {

if (theResource.getMeta().getLastUpdated() != null) {
int ordinal = ResourceIndexedSearchParamDate.calculateOrdinalValue(theResource.getMeta().getLastUpdated()).intValue();
retVal.addDateIndexData("_lastUpdated", theResource.getMeta().getLastUpdated(), ordinal,
theResource.getMeta().getLastUpdated(), ordinal);
retVal.addDateIndexData("_lastUpdated", theResource.getMeta().getLastUpdated(), ordinal,
theResource.getMeta().getLastUpdated(), ordinal);
}


@ -158,9 +155,9 @@ public class ExtendedHSearchIndexExtractor {
// Consider 2 cases for references
// Case 1: Resource Type and Resource ID is known
// Case 2: Resource is unknown and referred by canonical url reference
if(!Strings.isNullOrEmpty(nextLink.getTargetResourceId())) {
if (!Strings.isNullOrEmpty(nextLink.getTargetResourceId())) {
qualifiedTargetResourceId = nextLink.getTargetResourceType() + "/" + nextLink.getTargetResourceId();
} else if(!Strings.isNullOrEmpty(nextLink.getTargetResourceUrl())) {
} else if (!Strings.isNullOrEmpty(nextLink.getTargetResourceUrl())) {
qualifiedTargetResourceId = nextLink.getTargetResourceUrl();
}
retVal.addResourceLinkIndexData(nextParamName, qualifiedTargetResourceId);

@ -171,16 +168,6 @@ public class ExtendedHSearchIndexExtractor {
return retVal;
}

@Nonnull
public static DateSearchIndexData convertDate(ResourceIndexedSearchParamDate nextParam) {
return new DateSearchIndexData(nextParam.getValueLow(), nextParam.getValueLowDateOrdinal(), nextParam.getValueHigh(), nextParam.getValueHighDateOrdinal());
}

@Nonnull
public static QuantitySearchIndexData convertQuantity(ResourceIndexedSearchParamQuantity nextParam) {
return new QuantitySearchIndexData(nextParam.getUnits(), nextParam.getSystem(), nextParam.getValue().doubleValue());
}

@Nonnull
private CompositeSearchIndexData buildCompositeIndexData(ResourceIndexedSearchParamComposite theSearchParamComposite) {
return new HSearchCompositeSearchIndexDataImpl(theSearchParamComposite);

@ -192,35 +179,35 @@ public class ExtendedHSearchIndexExtractor {
private void extractAutocompleteTokens(IBaseResource theResource, ExtendedHSearchIndexData theRetVal) {
// we need to re-index token params to match up display with codes.
myParams.values().stream()
.filter(p->p.getParamType() == RestSearchParameterTypeEnum.TOKEN)
.filter(p -> p.getParamType() == RestSearchParameterTypeEnum.TOKEN)
// TODO it would be nice to reuse TokenExtractor
.forEach(p-> mySearchParamExtractor.extractValues(p.getPath(), theResource)
.forEach(nextValue->indexTokenValue(theRetVal, p, nextValue)
));
.forEach(p -> mySearchParamExtractor.extractValues(p.getPath(), theResource)
.forEach(nextValue -> indexTokenValue(theRetVal, p, nextValue)
));
}

private void indexTokenValue(ExtendedHSearchIndexData theRetVal, RuntimeSearchParam p, IBase nextValue) {
String nextType = mySearchParamExtractor.toRootTypeName(nextValue);
String spName = p.getName();
switch (nextType) {
case "CodeableConcept":
addToken_CodeableConcept(theRetVal, spName, nextValue);
break;
case "Coding":
addToken_Coding(theRetVal, spName, (IBaseCoding) nextValue);
break;
case "CodeableConcept":
addToken_CodeableConcept(theRetVal, spName, nextValue);
break;
case "Coding":
addToken_Coding(theRetVal, spName, (IBaseCoding) nextValue);
break;
// TODO share this with TokenExtractor and introduce a ITokenIndexer interface.
// Ignore unknown types for now.
// This is just for autocomplete, and we are focused on Observation.code, category, combo-code, etc.
// Ignore unknown types for now.
// This is just for autocomplete, and we are focused on Observation.code, category, combo-code, etc.
// case "Identifier":
// mySearchParamExtractor.addToken_Identifier(myResourceTypeName, params, searchParam, value);
// break;
// case "ContactPoint":
// mySearchParamExtractor.addToken_ContactPoint(myResourceTypeName, params, searchParam, value);
// break;
default:
break;
}
default:
break;
}
}

private void addToken_CodeableConcept(ExtendedHSearchIndexData theRetVal, String theSpName, IBase theValue) {

@ -234,4 +221,14 @@ public class ExtendedHSearchIndexExtractor {
theRetVal.addTokenIndexData(theSpName, theNextValue);
}

@Nonnull
public static DateSearchIndexData convertDate(ResourceIndexedSearchParamDate nextParam) {
return new DateSearchIndexData(nextParam.getValueLow(), nextParam.getValueLowDateOrdinal(), nextParam.getValueHigh(), nextParam.getValueHighDateOrdinal());
}

@Nonnull
public static QuantitySearchIndexData convertQuantity(ResourceIndexedSearchParamQuantity nextParam) {
return new QuantitySearchIndexData(nextParam.getUnits(), nextParam.getSystem(), nextParam.getValue().doubleValue());
}

}
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.dao.search;
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;

@ -39,15 +39,15 @@ public class LastNOperation {
public static final String OBSERVATION_RES_TYPE = "Observation";
private final SearchSession mySession;
private final FhirContext myFhirContext;
private final ModelConfig myModelConfig;
private final StorageSettings myStorageSettings;
private final ISearchParamRegistry mySearchParamRegistry;
private final ExtendedHSearchSearchBuilder myExtendedHSearchSearchBuilder = new ExtendedHSearchSearchBuilder();

public LastNOperation(SearchSession theSession, FhirContext theFhirContext, ModelConfig theModelConfig,
public LastNOperation(SearchSession theSession, FhirContext theFhirContext, StorageSettings theStorageSettings,
ISearchParamRegistry theSearchParamRegistry) {
mySession = theSession;
myFhirContext = theFhirContext;
myModelConfig = theModelConfig;
myStorageSettings = theStorageSettings;
mySearchParamRegistry = theSearchParamRegistry;
}

@ -61,7 +61,7 @@ public class LastNOperation {
.where(f -> f.bool(b -> {
// Must match observation type
b.must(f.match().field("myResourceType").matching(OBSERVATION_RES_TYPE));
ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myModelConfig, b, f);
ExtendedHSearchClauseBuilder builder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, b, f);
myExtendedHSearchSearchBuilder.addAndConsumeAdvancedQueryClauses(builder, OBSERVATION_RES_TYPE, theParams.clone(), mySearchParamRegistry);
}))
.aggregation(observationsByCodeKey, f -> f.fromJson(lastNAggregation.toAggregation()))
@ -31,7 +31,7 @@
* @see ca.uhn.fhir.jpa.dao.search.ExtendedHSearchSearchBuilder
* @see ca.uhn.fhir.jpa.model.search.SearchParamTextPropertyBinder
*
* Activated by {@link ca.uhn.fhir.jpa.api.config.DaoConfig#setAdvancedHSearchIndexing(boolean)}.
* Activated by {@link ca.uhn.fhir.jpa.api.config.JpaStorageSettings#setAdvancedHSearchIndexing(boolean)}.
*/
package ca.uhn.fhir.jpa.dao.search;
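Per the updated package javadoc, turning advanced Hibernate Search indexing on now goes through the renamed settings class; a minimal sketch:

    JpaStorageSettings settings = new JpaStorageSettings();
    settings.setAdvancedHSearchIndexing(true); // was DaoConfig#setAdvancedHSearchIndexing
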
@ -25,7 +25,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DeleteConflict;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;

@ -61,7 +61,7 @@ public class DeleteConflictService {
@Autowired
DeleteConflictFinderService myDeleteConflictFinderService;
@Autowired
DaoConfig myDaoConfig;
JpaStorageSettings myStorageSettings;
@Autowired
private FhirContext myFhirContext;

@ -75,7 +75,7 @@ public class DeleteConflictService {
}

private DeleteConflictOutcome handleConflicts(RequestDetails theRequest, DeleteConflictList theDeleteConflicts, ResourceTable theEntity, boolean theForValidate, List<ResourceLink> theResultList, TransactionDetails theTransactionDetails) {
if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete() && !theForValidate) {
if (!myStorageSettings.isEnforceReferentialIntegrityOnDelete() && !theForValidate) {
ourLog.debug("Deleting {} resource dependencies which can no longer be satisfied", theResultList.size());
myResourceLinkDao.deleteAll(theResultList);
return null;

@ -128,7 +128,7 @@ public class DeleteConflictService {
int shouldRetryCount = Math.min(outcome.getShouldRetryCount(), MAX_RETRY_ATTEMPTS);
if (!(retryCount < shouldRetryCount)) break;
newConflicts = new DeleteConflictList(newConflicts);
outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myDaoConfig.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
outcome = findAndHandleConflicts(theRequest, newConflicts, theEntity, theForValidate, myStorageSettings.getMaximumDeleteConflictQueryCount(), theTransactionDetails);
++retryCount;
}
theDeleteConflicts.addAll(newConflicts);
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.delete.batch2;
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.expunge.ResourceForeignKey;

@ -44,13 +44,13 @@ public class DeleteExpungeSqlBuilder {
public static final String THREAD_PREFIX = "delete-expunge";

private final ResourceTableFKProvider myResourceTableFKProvider;
private final DaoConfig myDaoConfig;
private final JpaStorageSettings myStorageSettings;
private final IIdHelperService myIdHelper;
private final IResourceLinkDao myResourceLinkDao;

public DeleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, DaoConfig theDaoConfig, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
public DeleteExpungeSqlBuilder(ResourceTableFKProvider theResourceTableFKProvider, JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper, IResourceLinkDao theResourceLinkDao) {
myResourceTableFKProvider = theResourceTableFKProvider;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;
myIdHelper = theIdHelper;
myResourceLinkDao = theResourceLinkDao;
}

@ -78,7 +78,7 @@ public class DeleteExpungeSqlBuilder {
}

public void validateOkToDeleteAndExpunge(List<Long> thePids) {
if (!myDaoConfig.isEnforceReferentialIntegrityOnDelete()) {
if (!myStorageSettings.isEnforceReferentialIntegrityOnDelete()) {
ourLog.info("Referential integrity on delete disabled. Skipping referential integrity check.");
return;
}
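DeleteExpungeSqlBuilder is constructed explicitly rather than autowired, so any factory method supplying it changes its second parameter type. A hedged sketch of such a @Bean method (the configuration class itself is assumed, not shown in this diff):

    @Bean
    DeleteExpungeSqlBuilder deleteExpungeSqlBuilder(ResourceTableFKProvider theFkProvider,
            JpaStorageSettings theStorageSettings, IIdHelperService theIdHelper,
            IResourceLinkDao theResourceLinkDao) {
        // Second argument was a DaoConfig before this commit.
        return new DeleteExpungeSqlBuilder(theFkProvider, theStorageSettings, theIdHelper, theResourceLinkDao);
    }
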
@ -31,7 +31,7 @@ import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;

@ -1710,7 +1710,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
spidxString
.addTask(new CalculateHashesTask(VersionEnum.V3_5_0, "20180903.28")
.setColumnName("HASH_NORM_PREFIX")
.addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new PartitionSettings(), RequestPartitionId.defaultPartition(), new ModelConfig(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
.addCalculator("HASH_NORM_PREFIX", t -> ResourceIndexedSearchParamString.calculateHashNormalized(new PartitionSettings(), RequestPartitionId.defaultPartition(), new StorageSettings(), t.getResourceType(), t.getString("SP_NAME"), t.getString("SP_VALUE_NORMALIZED")))
.addCalculator("HASH_EXACT", t -> ResourceIndexedSearchParamString.calculateHashExact(new PartitionSettings(), (ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId) null, t.getResourceType(), t.getParamName(), t.getString("SP_VALUE_EXACT")))
);
}
@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.provider;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.RestfulServer;

@ -51,7 +51,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaCapabilityStatementProvider extends ServerCapabilityStatementProvider {

private final FhirContext myContext;
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
private String myImplementationDescription;
private boolean myIncludeResourceCounts;
private IFhirSystemDao<?, ?> mySystemDao;

@ -59,17 +59,17 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro
/**
* Constructor
*/
public JpaCapabilityStatementProvider(@Nonnull RestfulServer theRestfulServer, @Nonnull IFhirSystemDao<?, ?> theSystemDao, @Nonnull DaoConfig theDaoConfig, @Nonnull ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) {
public JpaCapabilityStatementProvider(@Nonnull RestfulServer theRestfulServer, @Nonnull IFhirSystemDao<?, ?> theSystemDao, @Nonnull JpaStorageSettings theStorageSettings, @Nonnull ISearchParamRegistry theSearchParamRegistry, IValidationSupport theValidationSupport) {
super(theRestfulServer, theSearchParamRegistry, theValidationSupport);

Validate.notNull(theRestfulServer);
Validate.notNull(theSystemDao);
Validate.notNull(theDaoConfig);
Validate.notNull(theStorageSettings);
Validate.notNull(theSearchParamRegistry);

myContext = theRestfulServer.getFhirContext();
mySystemDao = theSystemDao;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;
setIncludeResourceCounts(true);
}

@ -91,9 +91,9 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro
protected void postProcessRest(FhirTerser theTerser, IBase theRest) {
super.postProcessRest(theTerser, theRest);

if (myDaoConfig.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myDaoConfig.getWebsocketContextPath())) {
ExtensionUtil.setExtension(myContext, theRest, Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL, "uri", myDaoConfig.getWebsocketContextPath());
if (myStorageSettings.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myStorageSettings.getWebsocketContextPath())) {
ExtensionUtil.setExtension(myContext, theRest, Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL, "uri", myStorageSettings.getWebsocketContextPath());
}
}

@ -105,7 +105,7 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro

theTerser.addElement(theResource, "versioning", ResourceVersionPolicy.VERSIONEDUPDATE.toCode());

if (myDaoConfig.isAllowMultipleDelete()) {
if (myStorageSettings.isAllowMultipleDelete()) {
theTerser.addElement(theResource, "conditionalDelete", ConditionalDeleteStatus.MULTIPLE.toCode());
} else {
theTerser.addElement(theResource, "conditionalDelete", ConditionalDeleteStatus.SINGLE.toCode());

@ -132,8 +132,8 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro
myIncludeResourceCounts = theIncludeResourceCounts;
}

public void setDaoConfig(DaoConfig myDaoConfig) {
this.myDaoConfig = myDaoConfig;
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
this.myStorageSettings = theStorageSettings;
}

@CoverageIgnore

@ -148,6 +148,6 @@ public class JpaCapabilityStatementProvider extends ServerCapabilityStatementPro

@Override
protected boolean searchParamEnabled(String theSearchParam) {
return !Constants.PARAM_FILTER.equals(theSearchParam) || myDaoConfig.isFilterParameterEnabled();
return !Constants.PARAM_FILTER.equals(theSearchParam) || myStorageSettings.isFilterParameterEnabled();
}
}
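Server bootstrap code that instantiates this provider swaps the third constructor argument from a DaoConfig to a JpaStorageSettings; a sketch with assumed local variable names:

    JpaCapabilityStatementProvider provider = new JpaCapabilityStatementProvider(
        restfulServer, systemDao, storageSettings, searchParamRegistry, validationSupport);
    restfulServer.setServerConformanceProvider(provider);
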
@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.provider;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.util.ResourceCountCache;
import ca.uhn.fhir.model.api.ExtensionDt;

@ -58,7 +58,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {

private volatile Conformance myCachedValue;
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
private String myImplementationDescription;
private boolean myIncludeResourceCounts;
private RestfulServer myRestfulServer;

@ -78,11 +78,11 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
/**
* Constructor
*/
public JpaConformanceProviderDstu2(RestfulServer theRestfulServer, IFhirSystemDao<Bundle, MetaDt> theSystemDao, DaoConfig theDaoConfig) {
public JpaConformanceProviderDstu2(RestfulServer theRestfulServer, IFhirSystemDao<Bundle, MetaDt> theSystemDao, JpaStorageSettings theStorageSettings) {
super(theRestfulServer);
myRestfulServer = theRestfulServer;
mySystemDao = theSystemDao;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;
super.setCache(false);
setIncludeResourceCounts(true);
}

@ -105,7 +105,7 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
for (RestResource nextResource : nextRest.getResource()) {

ConditionalDeleteStatusEnum conditionalDelete = nextResource.getConditionalDeleteElement().getValueAsEnum();
if (conditionalDelete == ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED && myDaoConfig.isAllowMultipleDelete() == false) {
if (conditionalDelete == ConditionalDeleteStatusEnum.MULTIPLE_DELETES_SUPPORTED && myStorageSettings.isAllowMultipleDelete() == false) {
nextResource.setConditionalDelete(ConditionalDeleteStatusEnum.SINGLE_DELETES_SUPPORTED);
}

@ -131,11 +131,11 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
}
}

if (myDaoConfig.getSupportedSubscriptionTypes().contains(Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myDaoConfig.getWebsocketContextPath())) {
if (myStorageSettings.getSupportedSubscriptionTypes().contains(Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myStorageSettings.getWebsocketContextPath())) {
ExtensionDt websocketExtension = new ExtensionDt();
websocketExtension.setUrl(Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL);
websocketExtension.setValue(new UriDt(myDaoConfig.getWebsocketContextPath()));
websocketExtension.setValue(new UriDt(myStorageSettings.getWebsocketContextPath()));
retVal.getRestFirstRep().addUndeclaredExtension(websocketExtension);
}
}

@ -151,8 +151,8 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
return myIncludeResourceCounts;
}

public void setDaoConfig(DaoConfig myDaoConfig) {
this.myDaoConfig = myDaoConfig;
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
this.myStorageSettings = theStorageSettings;
}

@CoverageIgnore
@ -26,7 +26,7 @@ import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.config.JpaConfig;

@ -39,6 +39,7 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.ParametersUtil;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseParameters;

@ -61,8 +62,6 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
@Autowired
protected IValidationSupport myValidationSupport;
@Autowired
private DaoConfig myDaoConfig;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private ITermReadSvc myTermReadSvc;

@ -70,24 +69,13 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
@Qualifier(JpaConfig.JPA_VALIDATION_SUPPORT_CHAIN)
private ValidationSupportChain myValidationSupportChain;

public void setValidationSupport(IValidationSupport theValidationSupport) {
myValidationSupport = theValidationSupport;
}

public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
}

public void setDaoRegistry(DaoRegistry theDaoRegistry) {
@VisibleForTesting
public void setDaoRegistryForUnitTest(DaoRegistry theDaoRegistry) {
myDaoRegistry = theDaoRegistry;
}

public void setTermReadSvc(ITermReadSvc theTermReadSvc) {
myTermReadSvc = theTermReadSvc;
}

public void setValidationSupportChain(ValidationSupportChain theValidationSupportChain) {
myValidationSupportChain = theValidationSupportChain;
public void setValidationSupport(IValidationSupport theValidationSupport) {
myValidationSupport = theValidationSupport;
}

@Operation(name = JpaConstants.OPERATION_EXPAND, idempotent = true, typeName = "ValueSet")

@ -200,8 +188,8 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
}

public static ValueSetExpansionOptions createValueSetExpansionOptions(DaoConfig theDaoConfig, IPrimitiveType<Integer> theOffset, IPrimitiveType<Integer> theCount, IPrimitiveType<Boolean> theIncludeHierarchy, IPrimitiveType<String> theFilter, IPrimitiveType<String> theDisplayLanguage) {
int offset = theDaoConfig.getPreExpandValueSetsDefaultOffset();
public static ValueSetExpansionOptions createValueSetExpansionOptions(JpaStorageSettings theStorageSettings, IPrimitiveType<Integer> theOffset, IPrimitiveType<Integer> theCount, IPrimitiveType<Boolean> theIncludeHierarchy, IPrimitiveType<String> theFilter, IPrimitiveType<String> theDisplayLanguage) {
int offset = theStorageSettings.getPreExpandValueSetsDefaultOffset();
if (theOffset != null && theOffset.hasValue()) {
if (theOffset.getValue() >= 0) {
offset = theOffset.getValue();

@ -210,7 +198,7 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
}
}

int count = theDaoConfig.getPreExpandValueSetsDefaultCount();
int count = theStorageSettings.getPreExpandValueSetsDefaultCount();
if (theCount != null && theCount.hasValue()) {
if (theCount.getValue() >= 0) {
count = theCount.getValue();

@ -218,7 +206,7 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
throw new InvalidRequestException(Msg.code(1136) + "count parameter for $expand operation must be >= 0 when specified. count: " + theCount.getValue());
}
}
int countMax = theDaoConfig.getPreExpandValueSetsMaxCount();
int countMax = theStorageSettings.getPreExpandValueSetsMaxCount();
if (count > countMax) {
ourLog.warn("count parameter for $expand operation of {} exceeds maximum value of {}; using maximum value.", count, countMax);
count = countMax;

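For downstream code that called the old DaoConfig-based overload of createValueSetExpansionOptions, the migration is a one-argument swap. A minimal sketch follows; the no-arg JpaStorageSettings constructor, the provider's package, and the R4 IntegerType/StringType wrappers are assumptions for illustration, not part of this commit:

import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider; // package assumed
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.StringType;

class ExpansionOptionsMigrationSketch {
	ValueSetExpansionOptions buildOptions() {
		// Before this commit the first argument was a DaoConfig; now it is the renamed settings bean.
		JpaStorageSettings settings = new JpaStorageSettings(); // assumed no-arg constructor
		return ValueSetOperationProvider.createValueSetExpansionOptions(
				settings,
				new IntegerType(0),      // theOffset
				new IntegerType(50),     // theCount, clamped to getPreExpandValueSetsMaxCount()
				null,                    // theIncludeHierarchy (optional)
				new StringType("abc"),   // theFilter (optional)
				null);                   // theDisplayLanguage (optional)
	}
}
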
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.provider.dstu3;
*/

import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;

@ -59,7 +59,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.server.ServerCapabilityStatementProvider {

private volatile CapabilityStatement myCachedValue;
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
private ISearchParamRegistry mySearchParamRegistry;
private String myImplementationDescription;
private boolean myIncludeResourceCounts;

@ -81,11 +81,11 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
/**
* Constructor
*/
public JpaConformanceProviderDstu3(RestfulServer theRestfulServer, IFhirSystemDao<Bundle, Meta> theSystemDao, DaoConfig theDaoConfig, ISearchParamRegistry theSearchParamRegistry) {
public JpaConformanceProviderDstu3(RestfulServer theRestfulServer, IFhirSystemDao<Bundle, Meta> theSystemDao, JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) {
super(theRestfulServer);
myRestfulServer = theRestfulServer;
mySystemDao = theSystemDao;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;
myServerConfiguration = theRestfulServer.createConfiguration();
super.setCache(false);
setSearchParamRegistry(theSearchParamRegistry);

@ -114,7 +114,7 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
nextResource.setVersioning(ResourceVersionPolicy.VERSIONEDUPDATE);

ConditionalDeleteStatus conditionalDelete = nextResource.getConditionalDelete();
if (conditionalDelete == ConditionalDeleteStatus.MULTIPLE && myDaoConfig.isAllowMultipleDelete() == false) {
if (conditionalDelete == ConditionalDeleteStatus.MULTIPLE && myStorageSettings.isAllowMultipleDelete() == false) {
nextResource.setConditionalDelete(ConditionalDeleteStatus.SINGLE);
}

@ -172,11 +172,11 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se

massage(retVal);

if (myDaoConfig.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myDaoConfig.getWebsocketContextPath())) {
if (myStorageSettings.getSupportedSubscriptionTypes().contains(org.hl7.fhir.dstu2.model.Subscription.SubscriptionChannelType.WEBSOCKET)) {
if (isNotBlank(myStorageSettings.getWebsocketContextPath())) {
Extension websocketExtension = new Extension();
websocketExtension.setUrl(Constants.CAPABILITYSTATEMENT_WEBSOCKET_URL);
websocketExtension.setValue(new UriType(myDaoConfig.getWebsocketContextPath()));
websocketExtension.setValue(new UriType(myStorageSettings.getWebsocketContextPath()));
retVal.getRestFirstRep().addExtension(websocketExtension);
}
}

@ -218,7 +218,7 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se

protected boolean searchParamEnabled(String theSearchParam) {
// Borrowed from hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/provider/ServerCapabilityStatementProvider.java
return !Constants.PARAM_FILTER.equals(theSearchParam) || myDaoConfig.isFilterParameterEnabled();
return !Constants.PARAM_FILTER.equals(theSearchParam) || myStorageSettings.isFilterParameterEnabled();
}

@ -275,8 +275,8 @@ public class JpaConformanceProviderDstu3 extends org.hl7.fhir.dstu3.hapi.rest.se
// nothing
}

public void setDaoConfig(DaoConfig myDaoConfig) {
this.myDaoConfig = myDaoConfig;
public void setStorageSettings(JpaStorageSettings theStorageSettings) {
this.myStorageSettings = theStorageSettings;
}

@CoverageIgnore

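Server setup code that constructed this provider directly must swap the third constructor argument. A hedged wiring sketch, assuming the DAO and registry beans come from the surrounding context and that ISearchParamRegistry lives in ca.uhn.fhir.rest.server.util:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.provider.dstu3.JpaConformanceProviderDstu3;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; // package assumed
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Meta;

class ConformanceWiringSketch {
	void wire(RestfulServer theServer, IFhirSystemDao<Bundle, Meta> theSystemDao,
			JpaStorageSettings theStorageSettings, ISearchParamRegistry theSearchParamRegistry) {
		// Third argument was DaoConfig before this commit; it is now JpaStorageSettings.
		JpaConformanceProviderDstu3 provider = new JpaConformanceProviderDstu3(
				theServer, theSystemDao, theStorageSettings, theSearchParamRegistry);
		theServer.setServerConformanceProvider(provider);
	}
}
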
@ -25,7 +25,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;

@ -100,7 +100,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
@Autowired
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private MemoryCacheService myMemoryCacheService;
@Autowired

@ -271,7 +271,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {

boolean haveOffset = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null;

switch (myDaoConfig.getHistoryCountMode()) {
switch (myStorageSettings.getHistoryCountMode()) {
case COUNT_ACCURATE: {
int count = supplier.apply(key);
mySearchEntity.setTotalCount(count);

@ -391,7 +391,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {

List<JpaPid> includedPidList = new ArrayList<>();
if (mySearchEntity.getSearchType() == SearchTypeEnum.SEARCH) {
Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
Integer maxIncludes = myStorageSettings.getMaximumIncludesToLoadPerPage();

// Load _revincludes
Set<JpaPid> includedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes);

@ -432,8 +432,8 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}

@VisibleForTesting

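Tests that stubbed the provider's configuration must follow the setter rename. A minimal sketch; the provider's package and the no-arg JpaStorageSettings constructor are assumptions:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider; // package assumed

class BundleProviderTestSupport {
	void injectSettings(PersistedJpaBundleProvider theProvider) {
		// Was: theProvider.setDaoConfigForUnitTest(new DaoConfig());
		theProvider.setStorageSettingsForUnitTest(new JpaStorageSettings()); // assumed no-arg constructor
	}
}
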
@ -26,7 +26,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;

@ -99,7 +99,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchCoordinatorSvcImpl.class);

private final FhirContext myContext;
private final DaoConfig myDaoConfig;
private final JpaStorageSettings myStorageSettings;
private final IInterceptorBroadcaster myInterceptorBroadcaster;
private final HapiTransactionService myTxService;
private final ISearchCacheSvc mySearchCacheSvc;

@ -128,7 +128,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
*/
public SearchCoordinatorSvcImpl(
FhirContext theContext,
DaoConfig theDaoConfig,
JpaStorageSettings theStorageSettings,
IInterceptorBroadcaster theInterceptorBroadcaster,
HapiTransactionService theTxService,
ISearchCacheSvc theSearchCacheSvc,

@ -145,7 +145,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
) {
super();
myContext = theContext;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;
myInterceptorBroadcaster = theInterceptorBroadcaster;
myTxService = theTxService;
mySearchCacheSvc = theSearchCacheSvc;

@ -369,7 +369,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {

if (cacheStatus != SearchCacheStatusEnum.NOT_TRIED) {
if (theParams.getEverythingMode() == null) {
if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
if (myStorageSettings.getReuseCachedSearchResultsForMillis() != null) {
PersistedJpaBundleProvider foundSearchProvider = findCachedQuery(theParams, theResourceType, theRequestDetails, queryString, theRequestPartitionId);
if (foundSearchProvider != null) {
foundSearchProvider.setCacheStatus(SearchCacheStatusEnum.HIT);

@ -525,7 +525,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
@Nullable
private Search findSearchToUseOrNull(String theQueryString, String theResourceType, RequestPartitionId theRequestPartitionId) {
// createdCutoff is in recent past
final Instant createdCutoff = Instant.now().minus(myDaoConfig.getReuseCachedSearchResultsForMillis(), ChronoUnit.MILLIS);
final Instant createdCutoff = Instant.now().minus(myStorageSettings.getReuseCachedSearchResultsForMillis(), ChronoUnit.MILLIS);

Optional<Search> candidate = mySearchCacheSvc.findCandidatesForReuse(theResourceType, theQueryString, createdCutoff, theRequestPartitionId);
return candidate.orElse(null);

@ -537,8 +537,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
if (theCacheControlDirective != null && theCacheControlDirective.isNoStore()) {
if (theCacheControlDirective.getMaxResults() != null) {
loadSynchronousUpTo = theCacheControlDirective.getMaxResults();
if (loadSynchronousUpTo > myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit()) {
throw new InvalidRequestException(Msg.code(1165) + Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myDaoConfig.getCacheControlNoStoreMaxResultsUpperLimit());
if (loadSynchronousUpTo > myStorageSettings.getCacheControlNoStoreMaxResultsUpperLimit()) {
throw new InvalidRequestException(Msg.code(1165) + Constants.HEADER_CACHE_CONTROL + " header " + Constants.CACHE_CONTROL_MAX_RESULTS + " value must not exceed " + myStorageSettings.getCacheControlNoStoreMaxResultsUpperLimit());
}
} else {
loadSynchronousUpTo = 100;

@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;

@ -37,7 +37,7 @@ import java.util.function.Supplier;
* Figure out how we're going to run the query up front, and build a branchless strategy object.
*/
public class SearchStrategyFactory {
private final DaoConfig myDaoConfig;
private final JpaStorageSettings myStorageSettings;
@Nullable
private final IFulltextSearchSvc myFulltextSearchSvc;

@ -52,16 +52,16 @@ public class SearchStrategyFactory {
// public class JPAHybridHSearchSavedSearch implements ISearchStrategy {};
// public class SavedSearchAdaptorStrategy implements ISearchStrategy {};

public SearchStrategyFactory(DaoConfig theDaoConfig, @Nullable IFulltextSearchSvc theFulltextSearchSvc) {
myDaoConfig = theDaoConfig;
public SearchStrategyFactory(JpaStorageSettings theStorageSettings, @Nullable IFulltextSearchSvc theFulltextSearchSvc) {
myStorageSettings = theStorageSettings;
myFulltextSearchSvc = theFulltextSearchSvc;
}

public boolean isSupportsHSearchDirect(String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails) {
return
myFulltextSearchSvc != null &&
myDaoConfig.isStoreResourceInHSearchIndex() &&
myDaoConfig.isAdvancedHSearchIndexing() &&
myStorageSettings.isStoreResourceInHSearchIndex() &&
myStorageSettings.isAdvancedHSearchIndexing() &&
myFulltextSearchSvc.supportsAllOf(theParams) &&
theParams.getSummaryMode() == null &&
theParams.getSearchTotalMode() == null;

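The factory keeps its optional fulltext dependency; only the settings type changes. A sketch of the post-rename construction, with the null second argument illustrating the @Nullable contract shown above:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.search.SearchStrategyFactory;

class StrategyFactorySketch {
	SearchStrategyFactory create(JpaStorageSettings theSettings) {
		// Passing null disables HSearch-direct search: isSupportsHSearchDirect()
		// also requires isStoreResourceInHSearchIndex() and isAdvancedHSearchIndexing().
		return new SearchStrategyFactory(theSettings, null);
	}
}
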
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;

@ -44,7 +44,7 @@ import static ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl.SEARCH_CLE
public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc, IHasScheduledJobs {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(StaleSearchDeletingSvcImpl.class);
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private ISearchCacheSvc mySearchCacheSvc;

@ -75,7 +75,7 @@ public class StaleSearchDeletingSvcImpl implements IStaleSearchDeletingSvc, IHas
@Transactional(propagation = Propagation.NEVER)
@Override
public synchronized void schedulePollForStaleSearches() {
if (!myDaoConfig.isSchedulingDisabled() && myDaoConfig.isEnableTaskStaleSearchCleanup()) {
if (!myStorageSettings.isSchedulingDisabled() && myStorageSettings.isEnableTaskStaleSearchCleanup()) {
pollForStaleSearchesAndDeleteThem();
}
}

@ -26,7 +26,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IResultIterator;

@ -68,7 +68,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
private FhirContext myContext;

@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;

@Autowired
private SearchBuilderFactory mySearchBuilderFactory;

@ -93,7 +93,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
searchRuntimeDetails.setLoadSynchronous(true);

boolean theParamWantOnlyCount = isWantOnlyCount(theParams);
boolean theParamOrConfigWantCount = nonNull(theParams.getSearchTotalMode()) ? isWantCount(theParams) : isWantCount(myDaoConfig.getDefaultTotalMode());
boolean theParamOrConfigWantCount = nonNull(theParams.getSearchTotalMode()) ? isWantCount(theParams) : isWantCount(myStorageSettings.getDefaultTotalMode());
boolean wantCount = theParamWantOnlyCount || theParamOrConfigWantCount;

// Execute the query and make sure we return distinct results

@ -166,7 +166,7 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
*/

// _includes
Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
Integer maxIncludes = myStorageSettings.getMaximumIncludesToLoadPerPage();
final Set<JpaPid> includedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes);
if (maxIncludes != null) {
maxIncludes -= includedPids.size();

@ -245,8 +245,8 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
return theLoadSynchronousUpTo;
} else if (theParams.getCount() != null) {
return theParams.getCount();
} else if (myDaoConfig.getFetchSizeDefaultMaximum() != null) {
return myDaoConfig.getFetchSizeDefaultMaximum();
} else if (myStorageSettings.getFetchSizeDefaultMaximum() != null) {
return myStorageSettings.getFetchSizeDefaultMaximum();
}
return null;
}

@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.autocomplete;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.dao.search.ExtendedHSearchClauseBuilder;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import com.google.gson.JsonObject;
import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;

@ -48,12 +48,12 @@ class TokenAutocompleteSearch {
private static final AggregationKey<JsonObject> AGGREGATION_KEY = AggregationKey.of("autocomplete");

private final FhirContext myFhirContext;
private final ModelConfig myModelConfig;
private final StorageSettings myStorageSettings;
private final SearchSession mySession;

public TokenAutocompleteSearch(FhirContext theFhirContext, ModelConfig theModelConfig, SearchSession theSession) {
public TokenAutocompleteSearch(FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) {
myFhirContext = theFhirContext;
myModelConfig = theModelConfig;
myStorageSettings = theStorageSettings;
mySession = theSession;
}

@ -74,7 +74,7 @@ class TokenAutocompleteSearch {
// compose the query json
SearchQueryOptionsStep<?, ?, SearchLoadingOptionsStep, ?, ?> query = mySession.search(ResourceTable.class)
.where(predFactory -> predFactory.bool(boolBuilder -> {
ExtendedHSearchClauseBuilder clauseBuilder = new ExtendedHSearchClauseBuilder(myFhirContext, myModelConfig, boolBuilder, predFactory);
ExtendedHSearchClauseBuilder clauseBuilder = new ExtendedHSearchClauseBuilder(myFhirContext, myStorageSettings, boolBuilder, predFactory);

// we apply resource-level predicates here, at the top level
if (isNotBlank(theResourceName)) {

@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.search.autocomplete;
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IBaseResource;

@ -68,7 +68,7 @@ public class ValueSetAutocompleteOptions {
}

public static ValueSetAutocompleteOptions validateAndParseOptions(
DaoConfig theDaoConfig,
JpaStorageSettings theStorageSettings,
IPrimitiveType<String> theContext,
IPrimitiveType<String> theFilter,
IPrimitiveType<Integer> theCount,

@ -82,7 +82,7 @@ public class ValueSetAutocompleteOptions {
if (haveId || haveIdentifier || haveValueSet) {
throw new InvalidRequestException(Msg.code(2020) + "$expand with contexDirection='existing' is only supported at the type leve. It is not supported at instance level, with a url specified, or with a ValueSet .");
}
if (!theDaoConfig.isAdvancedHSearchIndexing()) {
if (!theStorageSettings.isAdvancedHSearchIndexing()) {
throw new InvalidRequestException(Msg.code(2022) + "$expand with contexDirection='existing' requires Extended Lucene Indexing.");
}
if (theContext == null || theContext.isEmpty()) {

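Because validateAndParseOptions rejects requests unless advanced HSearch indexing is enabled (Msg.code(2022) above), callers of the renamed signature typically flip that flag on the settings bean first. A sketch; the setAdvancedHSearchIndexing setter pairing with the isAdvancedHSearchIndexing() getter is an assumption:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

class AutocompleteSettingsSketch {
	JpaStorageSettings enableAutocomplete() {
		JpaStorageSettings settings = new JpaStorageSettings(); // assumed no-arg constructor
		// Without this, validateAndParseOptions throws Msg.code(2022).
		settings.setAdvancedHSearchIndexing(true); // assumed setter
		return settings;
	}
}
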
@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.search.autocomplete;
*/

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IBaseResource;

@ -35,14 +35,14 @@ import java.util.List;
*/
public class ValueSetAutocompleteSearch {
private final FhirContext myFhirContext;
private final ModelConfig myModelConfig;
private final StorageSettings myStorageSettings;
private final TokenAutocompleteSearch myAutocompleteSearch;
static final int DEFAULT_SIZE = 30;

public ValueSetAutocompleteSearch(FhirContext theFhirContext, ModelConfig theModelConfig, SearchSession theSession) {
public ValueSetAutocompleteSearch(FhirContext theFhirContext, StorageSettings theStorageSettings, SearchSession theSession) {
myFhirContext = theFhirContext;
myModelConfig = theModelConfig;
myAutocompleteSearch = new TokenAutocompleteSearch(myFhirContext, myModelConfig, theSession);
myStorageSettings = theStorageSettings;
myAutocompleteSearch = new TokenAutocompleteSearch(myFhirContext, myStorageSettings, theSession);
}

public IBaseResource search(ValueSetAutocompleteOptions theOptions) {

@ -25,11 +25,10 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.exception.TokenParamFormatInvalidRequestException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;

@ -137,13 +136,12 @@ public class QueryStack {

private static final Logger ourLog = LoggerFactory.getLogger(QueryStack.class);

private final ModelConfig myModelConfig;
private final FhirContext myFhirContext;
private final SearchQueryBuilder mySqlBuilder;
private final SearchParameterMap mySearchParameters;
private final ISearchParamRegistry mySearchParamRegistry;
private final PartitionSettings myPartitionSettings;
private final DaoConfig myDaoConfig;
private final JpaStorageSettings myStorageSettings;
private final EnumSet<PredicateBuilderTypeEnum> myReusePredicateBuilderTypes;
private Map<PredicateBuilderCacheKey, BaseJoiningPredicateBuilder> myJoinMap;
// used for _offset queries with sort, should be removed once the fix is applied to the async path too.

@ -152,24 +150,22 @@ public class QueryStack {
/**
* Constructor
*/
public QueryStack(SearchParameterMap theSearchParameters, DaoConfig theDaoConfig, ModelConfig theModelConfig, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings) {
this(theSearchParameters, theDaoConfig, theModelConfig, theFhirContext, theSqlBuilder, theSearchParamRegistry, thePartitionSettings, EnumSet.of(PredicateBuilderTypeEnum.DATE));
public QueryStack(SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings) {
this(theSearchParameters, theStorageSettings, theFhirContext, theSqlBuilder, theSearchParamRegistry, thePartitionSettings, EnumSet.of(PredicateBuilderTypeEnum.DATE));
}

/**
* Constructor
*/
private QueryStack(SearchParameterMap theSearchParameters, DaoConfig theDaoConfig, ModelConfig theModelConfig, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings, EnumSet<PredicateBuilderTypeEnum> theReusePredicateBuilderTypes) {
private QueryStack(SearchParameterMap theSearchParameters, JpaStorageSettings theStorageSettings, FhirContext theFhirContext, SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theSearchParamRegistry, PartitionSettings thePartitionSettings, EnumSet<PredicateBuilderTypeEnum> theReusePredicateBuilderTypes) {
myPartitionSettings = thePartitionSettings;
assert theSearchParameters != null;
assert theDaoConfig != null;
assert theModelConfig != null;
assert theStorageSettings != null;
assert theFhirContext != null;
assert theSqlBuilder != null;

mySearchParameters = theSearchParameters;
myDaoConfig = theDaoConfig;
myModelConfig = theModelConfig;
myStorageSettings = theStorageSettings;
myFhirContext = theFhirContext;
mySqlBuilder = theSqlBuilder;
mySearchParamRegistry = theSearchParamRegistry;

@ -398,7 +394,7 @@ public class QueryStack {
// TODO - Change this when we have HFJ_SPIDX_MISSING table
/**
* How we search depends on if the
* {@link DaoConfig#getIndexMissingFields()} property
* {@link JpaStorageSettings#getIndexMissingFields()} property
* is Enabled or Disabled.
*
* If it is, we will use the SP_MISSING values set into the various

@ -422,7 +418,7 @@ public class QueryStack {
* that do not have a missing field (:missing=false) for much the same reason.
*/
SearchQueryBuilder sqlBuilder = theParams.getSqlBuilder();
if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) {
// new search
return createMissingPredicateForUnindexedMissingFields(theParams, sqlBuilder);
} else {

@ -884,7 +880,7 @@ public class QueryStack {
.collect(Collectors.toList());

BaseQuantityPredicateBuilder join = null;
boolean normalizedSearchEnabled = myModelConfig.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED);
boolean normalizedSearchEnabled = myStorageSettings.getNormalizedQuantitySearchLevel().equals(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_SUPPORTED);
if (normalizedSearchEnabled) {
List<QuantityParam> normalizedQuantityParams = quantityParams
.stream()

@ -1258,7 +1254,7 @@ public class QueryStack {
.collect(Collectors.toSet()));
// discrete -> contained -> discrete
updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath())), leafNodes);
if (myModelConfig.isIndexOnContainedResourcesRecursively()) {
if (myStorageSettings.isIndexOnContainedResourcesRecursively()) {
// discrete -> contained -> contained
updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(),
leafNodes

@ -1285,7 +1281,7 @@ public class QueryStack {
.stream()
.map(t -> t.withPathPrefix(nextChain.get(2).getResourceType(), nextChain.get(2).getSearchParameterName()))
.collect(Collectors.toSet()));
if (myModelConfig.isIndexOnContainedResourcesRecursively()) {
if (myStorageSettings.isIndexOnContainedResourcesRecursively()) {
// discrete -> contained -> contained -> discrete
updateMapOfReferenceLinks(referenceLinks, Lists.newArrayList(mergePaths(nextChain.get(0).getPath(), nextChain.get(1).getPath(), nextChain.get(2).getPath())), leafNodes);
// discrete -> discrete -> contained -> contained

@ -1390,7 +1386,7 @@ public class QueryStack {
}

private Condition createPredicateSource(@Nullable DbColumn theSourceJoinColumn, List<? extends IQueryParameterType> theList) {
if (myDaoConfig.getStoreMetaSourceInformation() == DaoConfig.StoreMetaSourceInformationEnum.NONE) {
if (myStorageSettings.getStoreMetaSourceInformation() == JpaStorageSettings.StoreMetaSourceInformationEnum.NONE) {
String msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "sourceParamDisabled");
throw new InvalidRequestException(Msg.code(1216) + msg);
}

@ -1568,10 +1564,10 @@ public class QueryStack {
if (id.isText()) {

// Check whether the :text modifier is actually enabled here
boolean tokenTextIndexingEnabled = BaseSearchParamExtractor.tokenTextIndexingEnabledForSearchParam(myModelConfig, theSearchParam);
boolean tokenTextIndexingEnabled = BaseSearchParamExtractor.tokenTextIndexingEnabledForSearchParam(myStorageSettings, theSearchParam);
if (!tokenTextIndexingEnabled) {
String msg;
if (myModelConfig.isSuppressStringIndexingInTokens()) {
if (myStorageSettings.isSuppressStringIndexingInTokens()) {
msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "textModifierDisabledForServer");
} else {
msg = myFhirContext.getLocalizer().getMessage(QueryStack.class, "textModifierDisabledForSearchParam");

@ -1680,7 +1676,7 @@ public class QueryStack {
}

public QueryStack newChildQueryFactoryWithFullBuilderReuse() {
return new QueryStack(mySearchParameters, myDaoConfig, myModelConfig, myFhirContext, mySqlBuilder, mySearchParamRegistry, myPartitionSettings, EnumSet.allOf(PredicateBuilderTypeEnum.class));
return new QueryStack(mySearchParameters, myStorageSettings, myFhirContext, mySqlBuilder, mySearchParamRegistry, myPartitionSettings, EnumSet.allOf(PredicateBuilderTypeEnum.class));
}

@Nullable

@ -1700,7 +1696,7 @@ public class QueryStack {
case Constants.PARAM_TAG:
case Constants.PARAM_PROFILE:
case Constants.PARAM_SECURITY:
if (myDaoConfig.getTagStorageMode() == DaoConfig.TagStorageModeEnum.INLINE) {
if (myStorageSettings.getTagStorageMode() == JpaStorageSettings.TagStorageModeEnum.INLINE) {
return createPredicateSearchParameter(theSourceJoinColumn, theResourceName, theParamName, theAndOrParams, theRequest, theRequestPartitionId);
} else {
return createPredicateTag(theSourceJoinColumn, theAndOrParams, theParamName, theRequestPartitionId);

@ -1812,7 +1808,7 @@ public class QueryStack {
}
if (filter != null) {

if (!myDaoConfig.isFilterParameterEnabled()) {
if (!myStorageSettings.isFilterParameterEnabled()) {
throw new InvalidRequestException(Msg.code(1222) + Constants.PARAM_FILTER + " parameter is disabled on this server");
}

@ -1834,7 +1830,7 @@ public class QueryStack {
}

private boolean isEligibleForContainedResourceSearch(List<? extends IQueryParameterType> nextAnd) {
return myModelConfig.isIndexOnContainedResources() &&
return myStorageSettings.isIndexOnContainedResources() &&
nextAnd.stream()
.filter(t -> t instanceof ReferenceParam)
.map(t -> ((ReferenceParam) t).getChain())

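The public QueryStack constructor drops from seven arguments to six: since JpaStorageSettings now subsumes the old ModelConfig, the separate ModelConfig argument disappears. A sketch of the new call shape, with all beans assumed to be supplied by the caller:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; // package assumed

class QueryStackSketch {
	QueryStack build(SearchParameterMap theParams, JpaStorageSettings theSettings, FhirContext theContext,
			SearchQueryBuilder theSqlBuilder, ISearchParamRegistry theRegistry, PartitionSettings thePartitions) {
		// Before: new QueryStack(theParams, theDaoConfig, theDaoConfig.getModelConfig(), ...)
		return new QueryStack(theParams, theSettings, theContext, theSqlBuilder, theRegistry, thePartitions);
	}
}
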
@ -30,7 +30,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;

@ -50,7 +50,6 @@ import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;

@ -70,7 +69,6 @@ import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;

@ -162,14 +160,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
private final SqlObjectFactory mySqlBuilderFactory;
private final HibernatePropertiesProvider myDialectProvider;
private final ModelConfig myModelConfig;
private final ISearchParamRegistry mySearchParamRegistry;
private final PartitionSettings myPartitionSettings;
private final DaoRegistry myDaoRegistry;
private final IResourceSearchViewDao myResourceSearchViewDao;
private final FhirContext myContext;
private final IIdHelperService<JpaPid> myIdHelperService;
private final DaoConfig myDaoConfig;
private final JpaStorageSettings myStorageSettings;
private final IDao myCallingDao;
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;

@ -195,11 +192,10 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
public SearchBuilder(
IDao theDao,
String theResourceName,
DaoConfig theDaoConfig,
JpaStorageSettings theStorageSettings,
HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
SqlObjectFactory theSqlBuilderFactory,
HibernatePropertiesProvider theDialectProvider,
ModelConfig theModelConfig,
ISearchParamRegistry theSearchParamRegistry,
PartitionSettings thePartitionSettings,
IInterceptorBroadcaster theInterceptorBroadcaster,

@ -213,12 +209,11 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
myCallingDao = theDao;
myResourceName = theResourceName;
myResourceType = theResourceType;
myDaoConfig = theDaoConfig;
myStorageSettings = theStorageSettings;

myEntityManagerFactory = theEntityManagerFactory;
mySqlBuilderFactory = theSqlBuilderFactory;
myDialectProvider = theDialectProvider;
myModelConfig = theModelConfig;
mySearchParamRegistry = theSearchParamRegistry;
myPartitionSettings = thePartitionSettings;
myInterceptorBroadcaster = theInterceptorBroadcaster;

@ -281,7 +276,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
* parameters all have no modifiers.
*/
private boolean isCompositeUniqueSpCandidate() {
return myDaoConfig.isUniqueIndexesEnabled() &&
return myStorageSettings.isUniqueIndexesEnabled() &&
myParams.getEverythingMode() == null &&
myParams.isAllParametersHaveNoModifier();
}

@ -442,7 +437,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

private List<JpaPid> executeLastNAgainstIndex(Integer theMaximumResults) {
// Can we use our hibernate search generated index on resource to support lastN?:
if (myDaoConfig.isAdvancedHSearchIndexing()) {
if (myStorageSettings.isAdvancedHSearchIndexing()) {
if (myFulltextSearchSvc == null) {
throw new InvalidRequestException(Msg.code(2027) + "LastN operation is not enabled on this service, can not process this request");
}

@ -529,8 +524,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

private Optional<SearchQueryExecutor> createChunkedQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, List<Long> thePidList) {
String sqlBuilderResourceName = myParams.getEverythingMode() == null ? myResourceName : null;
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);

if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(myResourceName, theParams.keySet());

@ -554,7 +549,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
// the one problem with this approach is that it doesn't catch Patients that have absolutely
// nothing linked to them. So we do one additional query to make sure we catch those too.
SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, myResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(myContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, myResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

@ -696,7 +691,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
}

private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
return myModelConfig.isIndexOnContainedResources() && theParams.values().stream()
return myStorageSettings.isIndexOnContainedResources() && theParams.values().stream()
.flatMap(Collection::stream)
.flatMap(Collection::stream)
.anyMatch(t -> t instanceof ReferenceParam);

@ -843,7 +838,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {

Map<Long, Long> resourcePidToVersion = null;
for (JpaPid next : thePids) {
if (next.getVersion() != null && myModelConfig.isRespectVersionsForSearchIncludes()) {
if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
if (resourcePidToVersion == null) {
resourcePidToVersion = new HashMap<>();
}

@ -999,8 +994,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
*/
private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
// is storage enabled?
return myDaoConfig.isStoreResourceInHSearchIndex() &&
myDaoConfig.isAdvancedHSearchIndexing() &&
return myStorageSettings.isStoreResourceInHSearchIndex() &&
myStorageSettings.isAdvancedHSearchIndexing() &&
// we don't support history
thePids.stream().noneMatch(p -> p.getVersion() != null) &&
// skip the complexity for metadata in dstu2

@ -1010,7 +1005,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
// Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES only impl
// to handle lastN?
if (myDaoConfig.isAdvancedHSearchIndexing() && myDaoConfig.isStoreResourceInHSearchIndex()) {
if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) {
List<Long> pidList = thePids.stream().map(pid -> (pid).getId()).collect(Collectors.toList());

List<IBaseResource> resources = myFulltextSearchSvc.getResources(pidList);

@ -1041,7 +1036,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
String findPidFieldName = theReverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID;
String findResourceTypeFieldName = theReverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE;
String findVersionFieldName = null;
if (!theReverseMode && myModelConfig.isRespectVersionsForSearchIncludes()) {
if (!theReverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) {
findVersionFieldName = MY_TARGET_RESOURCE_VERSION;
}

@ -1567,7 +1562,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
} else if (myParams.getOffset() != null && myParams.getCount() != null) {
myMaxResultsToFetch = myParams.getCount();
} else {
myMaxResultsToFetch = myDaoConfig.getFetchSizeDefaultMaximum();
myMaxResultsToFetch = myStorageSettings.getFetchSizeDefaultMaximum();
}
}

@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import com.healthmarketscience.sqlbuilder.BinaryCondition;

@ -55,8 +55,8 @@ public abstract class BasePredicateBuilder {
return mySearchSqlBuilder.getResourceType();
}

ModelConfig getModelConfig() {
return mySearchSqlBuilder.getModelConfig();
StorageSettings getStorageSettings() {
return mySearchSqlBuilder.getStorageSettings();
}

@Nonnull

@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
*/

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.model.api.IQueryParameterType;

@ -49,7 +49,7 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
private final DbColumn myColumnValueLowDateOrdinal;
private final DbColumn myColumnValueHighDateOrdinal;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;

/**
* Constructor

@ -64,8 +64,8 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
}

@VisibleForTesting
public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
public void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
myStorageSettings = theStorageSettings;
}

public Condition createPredicateDateWithoutIdentityPredicate(IQueryParameterType theParam,

@ -109,10 +109,10 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
Comparable<?> genericUpperBound;

/*
* If all present search parameters are of DAY precision, and {@link ca.uhn.fhir.jpa.model.entity.ModelConfig#getUseOrdinalDatesForDayPrecisionSearches()} is true,
* If all present search parameters are of DAY precision, and {@link ca.uhn.fhir.jpa.model.entity.StorageSettings#getUseOrdinalDatesForDayPrecisionSearches()} is true,
* then we attempt to use the ordinal field for date comparisons instead of the date field.
*/
boolean isOrdinalComparison = isNullOrDatePrecision(lowerBound) && isNullOrDatePrecision(upperBound) && myDaoConfig.getModelConfig().getUseOrdinalDatesForDayPrecisionSearches();
boolean isOrdinalComparison = isNullOrDatePrecision(lowerBound) && isNullOrDatePrecision(upperBound) && myStorageSettings.getUseOrdinalDatesForDayPrecisionSearches();

Condition lt;
Condition gt;

@ -144,12 +144,12 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
// use lower bound first
if (lowerBoundInstant != null) {
lb = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound);
if (myDaoConfig.isAccountForDateIndexNulls()) {
if (myStorageSettings.isAccountForDateIndexNulls()) {
lb = ComboCondition.or(lb, this.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound));
}
} else if (upperBoundInstant != null) {
ub = this.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound);
if (myDaoConfig.isAccountForDateIndexNulls()) {
if (myStorageSettings.isAccountForDateIndexNulls()) {
ub = ComboCondition.or(ub, this.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound));
}
} else {

@ -159,12 +159,12 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
// use upper bound first, e.g value between 6 and 10
if (upperBoundInstant != null) {
ub = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound);
if (myDaoConfig.isAccountForDateIndexNulls()) {
if (myStorageSettings.isAccountForDateIndexNulls()) {
ub = ComboCondition.or(ub, this.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound));
}
} else if (lowerBoundInstant != null) {
lb = this.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound);
if (myDaoConfig.isAccountForDateIndexNulls()) {
if (myStorageSettings.isAccountForDateIndexNulls()) {
lb = ComboCondition.or(lb, this.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound));
}
} else {

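The renamed test setter also removes a level of indirection: the ordinal-date flag used to be read through myDaoConfig.getModelConfig() and now sits directly on the settings object, as the isOrdinalComparison line above shows. A unit-test sketch; the setUseOrdinalDatesForDayPrecisionSearches setter pairing with the getter referenced above is an assumption:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;

class DatePredicateTestSupport {
	void configure(DatePredicateBuilder theBuilder) {
		JpaStorageSettings settings = new JpaStorageSettings(); // assumed no-arg constructor
		settings.setUseOrdinalDatesForDayPrecisionSearches(false); // assumed setter
		theBuilder.setStorageSettingsForUnitTest(settings);
	}
}
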
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
* #L%
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParams;
import com.healthmarketscience.sqlbuilder.Condition;

@ -29,7 +29,7 @@ public interface ICanMakeMissingParamPredicate {
* Creates the condition for searching for a missing field
* for a given SearchParameter type.
*
* Only use if {@link DaoConfig#getIndexMissingFields()} is disabled
* Only use if {@link JpaStorageSettings#getIndexMissingFields()} is disabled
*/
Condition createPredicateParamMissingValue(MissingQueryParameterPredicateParams theParams);
}

@ -32,7 +32,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;

@ -114,7 +114,7 @@ public class ResourceLinkPredicateBuilder
private final boolean myReversed;

@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;
@Autowired
private IInterceptorBroadcaster myInterceptorBroadcaster;
@Autowired

@ -195,7 +195,7 @@ public class ResourceLinkPredicateBuilder
IIdType dt = new IdDt(ref.getBaseUrl(), ref.getResourceType(), ref.getIdPart(), null);

if (dt.hasBaseUrl()) {
if (myDaoConfig.getTreatBaseUrlsAsLocal().contains(dt.getBaseUrl())) {
if (myStorageSettings.getTreatBaseUrlsAsLocal().contains(dt.getBaseUrl())) {
dt = dt.toUnqualified();
targetIds.add(dt);
} else {

@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;

@ -51,7 +51,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {
private final DbColumn myColumnHashIdentity;
private final DbColumn myColumnHashExact;
@Autowired
private DaoConfig myDaoConfig;
private JpaStorageSettings myStorageSettings;

/**
* Constructor

@ -94,7 +94,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {
StringParam id = (StringParam) theParameter;
rawSearchTerm = id.getValue();
if (id.isContains()) {
if (!myDaoConfig.isAllowContainsSearches()) {
if (!myStorageSettings.isAllowContainsSearches()) {
throw new MethodNotAllowedException(Msg.code(1258) + ":contains modifier is disabled on this server");
}
} else {

@ -122,7 +122,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {
String likeExpression;
if ((theParameter instanceof StringParam) &&
(((((StringParam) theParameter).isContains()) &&
(myDaoConfig.isAllowContainsSearches())) ||
(myStorageSettings.isAllowContainsSearches())) ||
(operation == SearchFilterParser.CompareOperation.co))) {
likeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
} else if ((operation != SearchFilterParser.CompareOperation.ne) &&

@ -174,7 +174,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {

@Nonnull
public Condition createPredicateNormalLike(String theResourceType, String theParamName, String theNormalizedString, String theLikeExpression) {
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getModelConfig(), theResourceType, theParamName, theNormalizedString);
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getStorageSettings(), theResourceType, theParamName, theNormalizedString);
Condition hashPredicate = BinaryCondition.equalTo(myColumnHashNormPrefix, generatePlaceholder(hash));
Condition valuePredicate = BinaryCondition.like(myColumnValueNormalized, generatePlaceholder(theLikeExpression));
return ComboCondition.and(hashPredicate, valuePredicate);

@ -182,7 +182,7 @@ public class StringPredicateBuilder extends BaseSearchParamPredicateBuilder {

@Nonnull
public Condition createPredicateNormal(String theResourceType, String theParamName, String theNormalizedString) {
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getModelConfig(), theResourceType, theParamName, theNormalizedString);
Long hash = ResourceIndexedSearchParamString.calculateHashNormalized(getPartitionSettings(), getRequestPartitionId(), getStorageSettings(), theResourceType, theParamName, theNormalizedString);
Condition hashPredicate = BinaryCondition.equalTo(myColumnHashNormPrefix, generatePlaceholder(hash));
Condition valuePredicate = BinaryCondition.equalTo(myColumnValueNormalized, generatePlaceholder(theNormalizedString));
return ComboCondition.and(hashPredicate, valuePredicate);

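Servers that allow :contains string searches flip the corresponding flag on the renamed settings bean; otherwise the builder throws Msg.code(1258) as shown above. A sketch; the setAllowContainsSearches setter pairing with isAllowContainsSearches() is an assumption:

import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

class ContainsSearchSettingsSketch {
	JpaStorageSettings allowContains() {
		JpaStorageSettings settings = new JpaStorageSettings(); // assumed no-arg constructor
		settings.setAllowContainsSearches(true); // assumed setter
		return settings;
	}
}
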
@@ -32,14 +32,13 @@ import ca.uhn.fhir.context.support.ValidationSupportContext;
 import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
 import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
-import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.util.QueryParameterUtils;
 import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
-import ca.uhn.fhir.jpa.util.QueryParameterUtils;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.model.base.composite.BaseCodingDt;
 import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;

@@ -67,9 +66,6 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;

-import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toAndPredicate;
-import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toEqualToOrInPredicate;
-import static ca.uhn.fhir.jpa.util.QueryParameterUtils.toOrPredicate;
 import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -88,11 +84,9 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
 @Autowired
 private ITermReadSvc myTerminologySvc;
-@Autowired
-private ModelConfig myModelConfig;
 @Autowired
 private FhirContext myContext;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;

 /**
  * Constructor

@@ -181,7 +175,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
 if (modifier == TokenParamModifier.IN || modifier == TokenParamModifier.NOT_IN) {
     if (myContext.getVersion().getVersion().isNewerThan(FhirVersionEnum.DSTU2)) {
         ValueSetExpansionOptions valueSetExpansionOptions = new ValueSetExpansionOptions();
-        valueSetExpansionOptions.setCount(myDaoConfig.getMaximumExpansionSize());
+        valueSetExpansionOptions.setCount(myStorageSettings.getMaximumExpansionSize());
         IValidationSupport.ValueSetExpansionOutcome expanded = myValidationSupport.expandValueSet(new ValidationSupportContext(myValidationSupport), valueSetExpansionOptions, code);

         codes.addAll(extractValueSetCodes(expanded.getValueSet()));

@@ -200,7 +194,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
 validateHaveSystemAndCodeForToken(paramName, code, system);
 codes.addAll(myTerminologySvc.findCodesBelow(system, code));
 } else if (modifier == TokenParamModifier.OF_TYPE) {
-    if (!myModelConfig.isIndexIdentifierOfType()) {
+    if (!myStorageSettings.isIndexIdentifierOfType()) {
         throw new MethodNotAllowedException(Msg.code(2012) + "The :of-type modifier is not enabled on this server");
     }
     if (isBlank(system) || isBlank(code)) {

@@ -270,7 +264,7 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
 BaseRuntimeElementCompositeDefinition<?> expansionDef = (BaseRuntimeElementCompositeDefinition<?>) myContext.getElementDefinition(expansion.getClass());
 BaseRuntimeChildDefinition containsChild = expansionDef.getChildByName("contains");
 List<IBase> contains = containsChild.getAccessor().getValues(expansion);

 BaseRuntimeChildDefinition.IAccessor systemAccessor = null;
 BaseRuntimeChildDefinition.IAccessor codeAccessor = null;
 for (IBase nextContains : contains) {

@@ -282,13 +276,13 @@ public class TokenPredicateBuilder extends BaseSearchParamPredicateBuilder {
 }
 String system = systemAccessor
     .getFirstValueOrNull(nextContains)
-    .map(t->(IPrimitiveType<?>)t)
-    .map(t->t.getValueAsString())
+    .map(t -> (IPrimitiveType<?>) t)
+    .map(t -> t.getValueAsString())
     .orElse(null);
 String code = codeAccessor
     .getFirstValueOrNull(nextContains)
-    .map(t->(IPrimitiveType<?>)t)
-    .map(t->t.getValueAsString())
+    .map(t -> (IPrimitiveType<?>) t)
+    .map(t -> t.getValueAsString())
     .orElse(null);
 if (isNotBlank(system) && isNotBlank(code)) {
     retVal.add(new FhirVersionIndependentConcept(system, code));
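The `:of-type` guard above now reads `myStorageSettings.isIndexIdentifierOfType()`. A hedged configuration sketch follows; the setter name mirrors the getter shown in the diff, so verify it against your HAPI version.

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class OfTypeConfigSketch {
	public static void main(String[] args) {
		JpaStorageSettings storageSettings = new JpaStorageSettings();
		// Identifier.type indexing is off by default; without it the :of-type
		// modifier is rejected with HTTP 405 (Msg.code 2012), as shown above.
		storageSettings.setIndexIdentifierOfType(true);
		System.out.println(storageSettings.isIndexIdentifierOfType());
	}
}
```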
@@ -26,7 +26,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
-import ca.uhn.fhir.jpa.model.entity.ModelConfig;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.search.builder.QueryStack;
 import ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder;
 import ca.uhn.fhir.jpa.search.builder.predicate.ComboNonUniqueSearchParameterPredicateBuilder;

@@ -97,7 +97,7 @@ public class SearchQueryBuilder {
 private final PartitionSettings myPartitionSettings;
 private final RequestPartitionId myRequestPartitionId;
 private final String myResourceType;
-private final ModelConfig myModelConfig;
+private final StorageSettings myStorageSettings;
 private final FhirContext myFhirContext;
 private final SqlObjectFactory mySqlBuilderFactory;
 private final boolean myCountQuery;

@@ -113,16 +113,16 @@ public class SearchQueryBuilder {
 /**
  * Constructor
  */
-public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) {
-    this(theFhirContext, theModelConfig, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>());
+public SearchQueryBuilder(FhirContext theFhirContext, StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) {
+    this(theFhirContext, theStorageSettings, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>());
 }

 /**
  * Constructor for child SQL Builders
  */
-private SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, String theBindVariableSubstitutionBase, Dialect theDialect, boolean theCountQuery, ArrayList<Object> theBindVariableValues) {
+private SearchQueryBuilder(FhirContext theFhirContext, StorageSettings theStorageSettings, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, String theBindVariableSubstitutionBase, Dialect theDialect, boolean theCountQuery, ArrayList<Object> theBindVariableValues) {
     myFhirContext = theFhirContext;
-    myModelConfig = theModelConfig;
+    myStorageSettings = theStorageSettings;
     myPartitionSettings = thePartitionSettings;
     myRequestPartitionId = theRequestPartitionId;
     myResourceType = theResourceType;

@@ -438,12 +438,6 @@ public class SearchQueryBuilder {
     mySelect.addJoins(SelectQuery.JoinType.INNER, join);
 }

-public void addJoinWithCustomOnCondition(DbTable theFromTable, DbTable theToTable, DbColumn theFromColumn, DbColumn theToColumn, Condition theCondition) {
-    Join join = new DbJoin(mySpec, theFromTable, theToTable, new DbColumn[]{theFromColumn}, new DbColumn[]{theToColumn});
-    // add hashIdentity codition here
-    mySelect.addJoins(SelectQuery.JoinType.INNER, join);
-}
-
 /**
  * Generate and return the SQL generated by this builder
  */

@@ -598,7 +592,7 @@ public class SearchQueryBuilder {
 public List<String> generatePlaceholders(Collection<?> theValues) {
     return theValues
         .stream()
-        .map(t -> generatePlaceholder(t))
+        .map(this::generatePlaceholder)
         .collect(Collectors.toList());
 }

@@ -627,8 +621,8 @@ public class SearchQueryBuilder {
     return myResourceType;
 }

-public ModelConfig getModelConfig() {
-    return myModelConfig;
+public StorageSettings getStorageSettings() {
+    return myStorageSettings;
 }

 public void addPredicate(@Nonnull Condition theCondition) {

@@ -708,13 +702,17 @@ public class SearchQueryBuilder {
     return BinaryCondition.greaterThanOrEq(theColumn, generatePlaceholder(theValue));
 case NOT_EQUAL:
     return BinaryCondition.notEqualTo(theColumn, generatePlaceholder(theValue));
+case STARTS_AFTER:
+case APPROXIMATE:
+case ENDS_BEFORE:
+case EQUAL:
 default:
     throw new IllegalArgumentException(Msg.code(1263));
 }
 }

 public SearchQueryBuilder newChildSqlBuilder() {
-    return new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, myResourceType, mySqlBuilderFactory, myBindVariableSubstitutionBase, myDialect, false, myBindVariableValues);
+    return new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, myResourceType, mySqlBuilderFactory, myBindVariableSubstitutionBase, myDialect, false, myBindVariableValues);
 }

 public SelectQuery getSelect() {
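The public constructor now takes `StorageSettings` where it previously took `ModelConfig`. Below is a sketch of calling the new signature; the collaborators (normally Spring beans) are taken as parameters to keep the example self-contained, and `RequestPartitionId.allPartitions()` is used as an assumed default.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;

public class SearchQueryBuilderSketch {

	// Collaborators are normally injected by Spring; they are passed in here
	// so the sketch stays self-contained.
	static SearchQueryBuilder newPatientQueryBuilder(
			FhirContext theFhirContext,
			StorageSettings theStorageSettings,
			PartitionSettings thePartitionSettings,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider) {
		// Last argument: false builds a PID-fetching query, true a COUNT query.
		return new SearchQueryBuilder(theFhirContext, theStorageSettings, thePartitionSettings,
			RequestPartitionId.allPartitions(), "Patient", theSqlBuilderFactory, theDialectProvider, false);
	}
}
```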
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.tasks;

 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;

@@ -50,7 +50,7 @@ public class SearchContinuationTask extends SearchTask {
 IInterceptorBroadcaster theInterceptorBroadcaster,
 SearchBuilderFactory theSearchBuilderFactory,
 ISearchResultCacheSvc theSearchResultCacheSvc,
-DaoConfig theDaoConfig,
+JpaStorageSettings theStorageSettings,
 ISearchCacheSvc theSearchCacheSvc,
 IPagingProvider thePagingProvider,
 ExceptionService theExceptionSvc

@@ -62,7 +62,7 @@ public class SearchContinuationTask extends SearchTask {
 theInterceptorBroadcaster,
 theSearchBuilderFactory,
 theSearchResultCacheSvc,
-theDaoConfig,
+theStorageSettings,
 theSearchCacheSvc,
 thePagingProvider
 );
@@ -26,7 +26,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.IDao;
 import ca.uhn.fhir.jpa.dao.IResultIterator;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;

@@ -124,7 +124,7 @@ public class SearchTask implements Callable<Void> {
 private final IInterceptorBroadcaster myInterceptorBroadcaster;
 private final SearchBuilderFactory<JpaPid> mySearchBuilderFactory;
 protected final ISearchResultCacheSvc mySearchResultCacheSvc;
-private final DaoConfig myDaoConfig;
+private final JpaStorageSettings myStorageSettings;
 private final ISearchCacheSvc mySearchCacheSvc;
 private final IPagingProvider myPagingProvider;

@@ -138,7 +138,7 @@ public class SearchTask implements Callable<Void> {
 IInterceptorBroadcaster theInterceptorBroadcaster,
 SearchBuilderFactory theSearchBuilderFactory,
 ISearchResultCacheSvc theSearchResultCacheSvc,
-DaoConfig theDaoConfig,
+JpaStorageSettings theStorageSettings,
 ISearchCacheSvc theSearchCacheSvc,
 IPagingProvider thePagingProvider
 ) {

@@ -148,7 +148,7 @@ public class SearchTask implements Callable<Void> {
 myInterceptorBroadcaster = theInterceptorBroadcaster;
 mySearchBuilderFactory = theSearchBuilderFactory;
 mySearchResultCacheSvc = theSearchResultCacheSvc;
-myDaoConfig = theDaoConfig;
+myStorageSettings = theStorageSettings;
 mySearchCacheSvc = theSearchCacheSvc;
 myPagingProvider = thePagingProvider;

@@ -341,9 +341,9 @@ public class SearchTask implements Callable<Void> {
     numSynced = mySyncedPids.size();
 }

-if (myDaoConfig.getCountSearchResultsUpTo() == null ||
-    myDaoConfig.getCountSearchResultsUpTo() <= 0 ||
-    myDaoConfig.getCountSearchResultsUpTo() <= numSynced) {
+if (myStorageSettings.getCountSearchResultsUpTo() == null ||
+    myStorageSettings.getCountSearchResultsUpTo() <= 0 ||
+    myStorageSettings.getCountSearchResultsUpTo() <= numSynced) {
     myInitialCollectionLatch.countDown();
 }

@@ -487,7 +487,7 @@ public class SearchTask implements Callable<Void> {
  * before doing anything else.
  */
 boolean myParamWantOnlyCount = isWantOnlyCount(myParams);
-boolean myParamOrDefaultWantCount = nonNull(myParams.getSearchTotalMode()) ? isWantCount(myParams) : SearchParameterMapCalculator.isWantCount(myDaoConfig.getDefaultTotalMode());
+boolean myParamOrDefaultWantCount = nonNull(myParams.getSearchTotalMode()) ? isWantCount(myParams) : SearchParameterMapCalculator.isWantCount(myStorageSettings.getDefaultTotalMode());

 if (myParamWantOnlyCount || myParamOrDefaultWantCount) {
     ourLog.trace("Performing count");

@@ -524,7 +524,7 @@ public class SearchTask implements Callable<Void> {
 /*
  * Figure out how many results we're actually going to fetch from the
  * database in this pass. This calculation takes into consideration the
- * "pre-fetch thresholds" specified in DaoConfig#getSearchPreFetchThresholds()
+ * "pre-fetch thresholds" specified in StorageSettings#getSearchPreFetchThresholds()
  * as well as the value of the _count parameter.
  */
 int currentlyLoaded = defaultIfNull(mySearch.getNumFound(), 0);

@@ -535,7 +535,7 @@ public class SearchTask implements Callable<Void> {
     minWanted += currentlyLoaded;
 }

-for (Iterator<Integer> iter = myDaoConfig.getSearchPreFetchThresholds().iterator(); iter.hasNext(); ) {
+for (Iterator<Integer> iter = myStorageSettings.getSearchPreFetchThresholds().iterator(); iter.hasNext(); ) {
     int next = iter.next();
     if (next != -1 && next <= currentlyLoaded) {
         continue;

@@ -599,9 +599,9 @@ public class SearchTask implements Callable<Void> {

 boolean shouldSync = myUnsyncedPids.size() >= syncSize;

-if (myDaoConfig.getCountSearchResultsUpTo() != null &&
-    myDaoConfig.getCountSearchResultsUpTo() > 0 &&
-    myDaoConfig.getCountSearchResultsUpTo() < myUnsyncedPids.size()) {
+if (myStorageSettings.getCountSearchResultsUpTo() != null &&
+    myStorageSettings.getCountSearchResultsUpTo() > 0 &&
+    myStorageSettings.getCountSearchResultsUpTo() < myUnsyncedPids.size()) {
     shouldSync = false;
 }
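The `getCountSearchResultsUpTo()` checks above gate two things: releasing the initial-collection latch and suppressing early result syncs. A simplified, self-contained model of the two boolean gates, extracted verbatim from the code shown in the diff:

```java
public class CountUpToGateSketch {

	// First check above: the initial-collection latch may count down when the
	// "count up to" setting is disabled (null or <= 0) or its target is reached.
	static boolean canReleaseInitialLatch(Integer theCountUpTo, int theNumSynced) {
		return theCountUpTo == null || theCountUpTo <= 0 || theCountUpTo <= theNumSynced;
	}

	// Second check above, mirrored verbatim: when it holds, the task sets
	// shouldSync = false and keeps accumulating unsynced PIDs.
	static boolean shouldSuppressSync(Integer theCountUpTo, int theUnsyncedCount) {
		return theCountUpTo != null && theCountUpTo > 0 && theCountUpTo < theUnsyncedCount;
	}

	public static void main(String[] args) {
		System.out.println(canReleaseInitialLatch(null, 0));  // true: feature disabled
		System.out.println(canReleaseInitialLatch(100, 42));  // false: target not reached
		System.out.println(shouldSuppressSync(100, 250));     // true: sync deferred
	}
}
```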
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.cache;

 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.dao.data.ISearchDao;
 import ca.uhn.fhir.jpa.dao.data.ISearchIncludeDao;
 import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;

@@ -81,7 +81,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
 @Autowired
 private PlatformTransactionManager myTxManager;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;

 @VisibleForTesting
 public void setCutoffSlackForUnitTest(long theCutoffSlack) {

@@ -156,13 +156,13 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
 @Transactional(propagation = Propagation.NEVER)
 @Override
 public void pollForStaleSearchesAndDeleteThem() {
-    if (!myDaoConfig.isExpireSearchResults()) {
+    if (!myStorageSettings.isExpireSearchResults()) {
         return;
     }

-    long cutoffMillis = myDaoConfig.getExpireSearchResultsAfterMillis();
-    if (myDaoConfig.getReuseCachedSearchResultsForMillis() != null) {
-        cutoffMillis = cutoffMillis + myDaoConfig.getReuseCachedSearchResultsForMillis();
+    long cutoffMillis = myStorageSettings.getExpireSearchResultsAfterMillis();
+    if (myStorageSettings.getReuseCachedSearchResultsForMillis() != null) {
+        cutoffMillis = cutoffMillis + myStorageSettings.getReuseCachedSearchResultsForMillis();
     }
     final Date cutoff = new Date((now() - cutoffMillis) - myCutoffSlack);
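The stale-search cutoff above combines three knobs: the expire-after window, the optional reuse window, and a safety slack. A self-contained sketch of the same arithmetic, with assumed example values:

```java
import java.util.Date;

public class StaleSearchCutoffSketch {

	// Mirrors pollForStaleSearchesAndDeleteThem(): a search is stale once it is
	// older than expire-after plus the reuse window (if set) plus a safety slack.
	static Date calculateCutoff(long theExpireAfterMillis, Long theReuseForMillis, long theCutoffSlackMillis) {
		long cutoffMillis = theExpireAfterMillis;
		if (theReuseForMillis != null) {
			cutoffMillis = cutoffMillis + theReuseForMillis;
		}
		return new Date((System.currentTimeMillis() - cutoffMillis) - theCutoffSlackMillis);
	}

	public static void main(String[] args) {
		// Assumed example values: expire after 1h, 60s reuse window, 10s slack.
		System.out.println(calculateCutoff(3_600_000L, 60_000L, 10_000L));
	}
}
```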
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.elastic;

 import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
 import org.hibernate.search.backend.elasticsearch.logging.impl.Log;

@@ -37,43 +37,48 @@ import java.util.regex.Pattern;
 /**
  * This class instructs hibernate search on how to create index names for indexed entities.
  * In our case, we use this class to add an optional prefix to all indices which are created, which can be controlled via
- * {@link DaoConfig#setHSearchIndexPrefix(String)}.
+ * {@link JpaStorageSettings#setHSearchIndexPrefix(String)}.
  */
 @Service
 public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy {

 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;

 static final Log log = LoggerFactory.make(Log.class, MethodHandles.lookup());
 public static final String NAME = "prefix";
 public static final Pattern UNIQUE_KEY_EXTRACTION_PATTERN = Pattern.compile("(.*)-\\d{6}");

 @Override
 public String createInitialElasticsearchIndexName(String hibernateSearchIndexName) {
     return addPrefixIfNecessary(hibernateSearchIndexName + "-000001");
 }

 @Override
 public String createWriteAlias(String hibernateSearchIndexName) {
     return addPrefixIfNecessary(hibernateSearchIndexName + "-write");
 }

 @Override
 public String createReadAlias(String hibernateSearchIndexName) {
     return addPrefixIfNecessary(hibernateSearchIndexName + "-read");
 }

 private String addPrefixIfNecessary(String theCandidateName) {
-    validateDaoConfigIsPresent();
-    if (!StringUtils.isBlank(myDaoConfig.getHSearchIndexPrefix())) {
-        return myDaoConfig.getHSearchIndexPrefix() + "-" + theCandidateName;
+    validateStorageSettingsIsPresent();
+    if (!StringUtils.isBlank(myStorageSettings.getHSearchIndexPrefix())) {
+        return myStorageSettings.getHSearchIndexPrefix() + "-" + theCandidateName;
     } else {
         return theCandidateName;
     }
 }

 @Override
 public String extractUniqueKeyFromHibernateSearchIndexName(String hibernateSearchIndexName) {
     return hibernateSearchIndexName;
 }

 @Override
 public String extractUniqueKeyFromElasticsearchIndexName(String elasticsearchIndexName) {
     Matcher matcher = UNIQUE_KEY_EXTRACTION_PATTERN.matcher(elasticsearchIndexName);
     if (!matcher.matches()) {

@@ -85,16 +90,16 @@ public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy {
 }

 private String removePrefixIfNecessary(String theCandidateUniqueKey) {
-    validateDaoConfigIsPresent();
-    if (!StringUtils.isBlank(myDaoConfig.getHSearchIndexPrefix())) {
-        return theCandidateUniqueKey.replace(myDaoConfig.getHSearchIndexPrefix() + "-", "");
+    validateStorageSettingsIsPresent();
+    if (!StringUtils.isBlank(myStorageSettings.getHSearchIndexPrefix())) {
+        return theCandidateUniqueKey.replace(myStorageSettings.getHSearchIndexPrefix() + "-", "");
     } else {
         return theCandidateUniqueKey;
     }
 }
-private void validateDaoConfigIsPresent() {
-    if (myDaoConfig == null) {
-        throw new ConfigurationException(Msg.code(1168) + "While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the DaoConfig. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean.");
+private void validateStorageSettingsIsPresent() {
+    if (myStorageSettings == null) {
+        throw new ConfigurationException(Msg.code(1168) + "While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the StorageSettings. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean.");
     }
 }
 }
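With a prefix configured via `JpaStorageSettings#setHSearchIndexPrefix(String)` (the setter the updated javadoc links to), the strategy above prepends it to every generated index name and alias. A minimal configuration sketch; the index name in the comments is illustrative:

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class IndexPrefixSketch {
	public static void main(String[] args) {
		JpaStorageSettings settings = new JpaStorageSettings();
		settings.setHSearchIndexPrefix("myprefix");
		// For a hypothetical index named "resourcetable", the strategy above emits:
		//   myprefix-resourcetable-000001  (initial index)
		//   myprefix-resourcetable-write   (write alias)
		//   myprefix-resourcetable-read    (read alias)
		System.out.println(settings.getHSearchIndexPrefix());
	}
}
```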
@@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.search.reindex;
 import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;

@@ -94,7 +94,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs {
 @Autowired
 private IResourceReindexJobDao myReindexJobDao;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;
 @Autowired
 private PlatformTransactionManager myTxManager;
 private TransactionTemplate myTxTemplate;

@@ -116,8 +116,8 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs {
 private ResourceReindexer myResourceReindexer;

 @VisibleForTesting
-void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
-    myDaoConfig = theDaoConfig;
+void setStorageSettingsForUnitTest(JpaStorageSettings theStorageSettings) {
+    myStorageSettings = theStorageSettings;
 }

 @VisibleForTesting

@@ -133,7 +133,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs {

 public void initExecutor() {
     // Create the threadpool executor used for reindex jobs
-    int reindexThreadCount = myDaoConfig.getReindexThreadCount();
+    int reindexThreadCount = myStorageSettings.getReindexThreadCount();
     RejectedExecutionHandler rejectHandler = new BlockPolicy();
     myTaskExecutor = new ThreadPoolExecutor(0, reindexThreadCount,
         0L, TimeUnit.MILLISECONDS,

@@ -202,7 +202,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs {
 @Override
 @Transactional(propagation = Propagation.NEVER)
 public Integer runReindexingPass() {
-    if (myDaoConfig.isSchedulingDisabled() || !myDaoConfig.isEnableTaskPreExpandValueSets()) {
+    if (myStorageSettings.isSchedulingDisabled() || !myStorageSettings.isEnableTaskPreExpandValueSets()) {
         return null;
     }
     if (myIndexingLock.tryLock()) {
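`initExecutor()` above sizes its pool from `getReindexThreadCount()`. A rough, self-contained approximation of that setup follows; HAPI's `BlockPolicy` rejection handler is swapped for the JDK's `CallerRunsPolicy` since only the shape of the executor is being illustrated, and the queue capacity of 100 is an assumption:

```java
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ReindexExecutorSketch {
	public static void main(String[] args) throws InterruptedException {
		int reindexThreadCount = 2; // normally myStorageSettings.getReindexThreadCount()
		// HAPI installs a blocking rejection policy (BlockPolicy); CallerRunsPolicy
		// is a stand-in here that also applies back-pressure to the submitter.
		ThreadPoolExecutor taskExecutor = new ThreadPoolExecutor(
			0, reindexThreadCount,
			0L, TimeUnit.MILLISECONDS,
			new LinkedBlockingQueue<>(100),
			new ThreadPoolExecutor.CallerRunsPolicy());
		taskExecutor.submit(() -> System.out.println("reindex pass"));
		taskExecutor.shutdown();
		taskExecutor.awaitTermination(10, TimeUnit.SECONDS);
	}
}
```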
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.i18n.Msg;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.WarmCacheEntry;

@@ -55,7 +55,7 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc, IHasScheduledJobs {

 private static final Logger ourLog = LoggerFactory.getLogger(CacheWarmingSvcImpl.class);
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;
 private Map<WarmCacheEntry, Long> myCacheEntryToNextRefresh = new LinkedHashMap<>();
 @Autowired
 private FhirContext myCtx;

@@ -131,7 +131,7 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc, IHasScheduledJobs {
 public synchronized Set<WarmCacheEntry> initCacheMap() {

     myCacheEntryToNextRefresh.clear();
-    List<WarmCacheEntry> warmCacheEntries = myDaoConfig.getWarmCacheEntries();
+    List<WarmCacheEntry> warmCacheEntries = myStorageSettings.getWarmCacheEntries();
     for (WarmCacheEntry next : warmCacheEntries) {

         // Validate
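The warm-cache loop above re-runs each configured search on a schedule so its results stay cached. A hedged sketch of registering an entry, assuming the fluent `WarmCacheEntry` setters and a mutable list from `getWarmCacheEntries()`, as in recent HAPI versions:

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.WarmCacheEntry;

public class CacheWarmingConfigSketch {
	public static void main(String[] args) {
		JpaStorageSettings settings = new JpaStorageSettings();
		// Re-run this search every 60 seconds so its results stay warm.
		// Assumes getWarmCacheEntries() returns a mutable list.
		settings.getWarmCacheEntries().add(new WarmCacheEntry()
			.setPeriodMillis(60_000L)
			.setUrl("Patient?_count=10"));
		System.out.println(settings.getWarmCacheEntries().size());
	}
}
```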
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.sp;
  * #L%
  */

-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;

@@ -46,17 +46,17 @@ public class SearchParamPresenceSvcImpl implements ISearchParamPresenceSvc {
 @Autowired
 private PartitionSettings myPartitionSettings;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;

 @VisibleForTesting
-public void setDaoConfig(DaoConfig theDaoConfig) {
-    myDaoConfig = theDaoConfig;
+public void setStorageSettings(JpaStorageSettings theStorageSettings) {
+    myStorageSettings = theStorageSettings;
 }

 @Override
 public AddRemoveCount updatePresence(ResourceTable theResource, Map<String, Boolean> theParamNameToPresence) {
     AddRemoveCount retVal = new AddRemoveCount();
-    if (myDaoConfig.getIndexMissingFields() == DaoConfig.IndexEnabledEnum.DISABLED) {
+    if (myStorageSettings.getIndexMissingFields() == JpaStorageSettings.IndexEnabledEnum.DISABLED) {
         return retVal;
     }
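`updatePresence()` above is a no-op unless missing-fields indexing is enabled. A configuration sketch using the enum shown in the diff; `DISABLED` is the default in stock HAPI configurations (treat that as an assumption for your version):

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class MissingFieldsConfigSketch {
	public static void main(String[] args) {
		JpaStorageSettings settings = new JpaStorageSettings();
		// While this is DISABLED, updatePresence() returns immediately and
		// :missing searches cannot be answered from the presence table.
		settings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.ENABLED);
		System.out.println(settings.getIndexMissingFields());
	}
}
```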
@@ -24,7 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.FhirVersionEnum;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
 import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;

@@ -120,7 +120,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 @Autowired
 private ITermReadSvc myTerminologySvc;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;
 @Autowired
 private IResourceTableDao myResourceTableDao;

@@ -499,7 +499,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 termConceptProperty.setConcept(theConceptToAdd);
 termConceptProperty.setCodeSystemVersion(theCsv);
 });
-if (theStatisticsTracker.getUpdatedConceptCount() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
+if (theStatisticsTracker.getUpdatedConceptCount() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) {
     saveConcept(conceptToAdd);
     Long nextConceptPid = conceptToAdd.getId();
     Validate.notNull(nextConceptPid);

@@ -522,7 +522,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 conceptToAdd.getParents().add(parentLink);
 ourLog.info("Saving parent/child link - Parent[{}] Child[{}]", parentLink.getParent().getCode(), parentLink.getChild().getCode());

-if (theStatisticsTracker.getUpdatedConceptCount() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
+if (theStatisticsTracker.getUpdatedConceptCount() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) {
     myConceptParentChildLinkDao.save(parentLink);
 } else {
     myDeferredStorageSvc.addConceptLinkToStorageQueue(parentLink);

@@ -574,7 +574,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 theConcept.setCodeSystemVersion(theCodeSystem);
 theConcept.setIndexStatus(BaseHapiFhirDao.INDEX_STATUS_INDEXED);

-if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
+if (theConceptsStack.size() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) {
     saveConcept(theConcept);
 } else {
     myDeferredStorageSvc.addConceptToStorageQueue(theConcept);

@@ -585,7 +585,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
 }

 for (TermConceptParentChildLink next : theConcept.getChildren()) {
-    if (theConceptsStack.size() <= myDaoConfig.getDeferIndexingForCodesystemsOfSize()) {
+    if (theConceptsStack.size() <= myStorageSettings.getDeferIndexingForCodesystemsOfSize()) {
         saveConceptLink(next);
     } else {
         myDeferredStorageSvc.addConceptLinkToStorageQueue(next);
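All four call sites above apply the same rule when loading a code system. A simplified model of the routing decision; the threshold normally comes from `getDeferIndexingForCodesystemsOfSize()` (100 by default in recent HAPI versions — treat that default as an assumption):

```java
public class DeferredIndexingSketch {

	// Mirrors the checks above: write concepts and links synchronously until
	// the threshold is crossed, then push the work onto the deferred-storage queue.
	static String route(long theProcessedSoFar, long theDeferThreshold) {
		return theProcessedSoFar <= theDeferThreshold ? "save now" : "defer to queue";
	}

	public static void main(String[] args) {
		long threshold = 100; // assumed default of getDeferIndexingForCodesystemsOfSize()
		System.out.println(route(50, threshold));  // save now
		System.out.println(route(101, threshold)); // defer to queue
	}
}
```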
@@ -374,7 +374,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHasScheduledJobs {
 request.setParameters(parameters);

 Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
-myJobExecutions.add(response.getJobId());
+myJobExecutions.add(response.getInstanceId());
 }

 private void deleteTermCodeSystemOffline(Long theCodeSystemPid) {

@@ -384,7 +384,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHasScheduledJobs {
 request.setParameters(parameters);
 request.setJobDefinitionId(TERM_CODE_SYSTEM_DELETE_JOB_NAME);
 Batch2JobStartResponse response = myJobCoordinator.startInstance(request);
-myJobExecutions.add(response.getJobId());
+myJobExecutions.add(response.getInstanceId());
 }
@@ -28,7 +28,7 @@ import ca.uhn.fhir.context.support.ValidationSupportContext;
 import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IDao;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;

@@ -235,7 +235,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 @Autowired
 private ITermCodeSystemVersionDao myCodeSystemVersionDao;
 @Autowired
-private DaoConfig myDaoConfig;
+private JpaStorageSettings myStorageSettings;
 private TransactionTemplate myTxTemplate;
 @Autowired
 private PlatformTransactionManager myTransactionManager;

@@ -350,8 +350,8 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 private boolean addToSet(Set<TermConcept> theSetToPopulate, TermConcept theConcept) {
     boolean retVal = theSetToPopulate.add(theConcept);
     if (retVal) {
-        if (theSetToPopulate.size() >= myDaoConfig.getMaximumExpansionSize()) {
-            String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "expansionTooLarge", myDaoConfig.getMaximumExpansionSize());
+        if (theSetToPopulate.size() >= myStorageSettings.getMaximumExpansionSize()) {
+            String msg = myContext.getLocalizer().getMessage(TermReadSvcImpl.class, "expansionTooLarge", myStorageSettings.getMaximumExpansionSize());
             throw new ExpansionTooCostlyException(Msg.code(885) + msg);
         }
     }

@@ -432,7 +432,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 int count = expansionOptions.getCount();

 ValueSetExpansionComponentWithConceptAccumulator accumulator = new ValueSetExpansionComponentWithConceptAccumulator(myContext, count, expansionOptions.isIncludeHierarchy());
-accumulator.setHardExpansionMaximumSize(myDaoConfig.getMaximumExpansionSize());
+accumulator.setHardExpansionMaximumSize(myStorageSettings.getMaximumExpansionSize());
 accumulator.setSkipCountRemaining(offset);
 accumulator.setIdentifier(UUID.randomUUID().toString());
 accumulator.setTimestamp(new Date());

@@ -535,7 +535,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
 offset = Math.min(offset, theTermValueSet.getTotalConcepts().intValue());

 Integer count = theAccumulator.getCapacityRemaining();
-count = defaultIfNull(count, myDaoConfig.getMaximumExpansionSize());
+count = defaultIfNull(count, myStorageSettings.getMaximumExpansionSize());

 int conceptsExpanded = 0;
 int designationsExpanded = 0;

@@ -1900,7 +1900,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {

 @Override
 public synchronized void preExpandDeferredValueSetsToTerminologyTables() {
-    if (!myDaoConfig.isEnableTaskPreExpandValueSets()) {
+    if (!myStorageSettings.isEnableTaskPreExpandValueSets()) {
         return;
     }
     if (isNotSafeToPreExpandValueSets()) {
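`getMaximumExpansionSize()` now lives on `JpaStorageSettings` and is consulted at every expansion site above. A minimal sketch of tuning it; the setter name mirrors the getter, so verify it against your version:

```java
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;

public class ExpansionLimitSketch {
	public static void main(String[] args) {
		JpaStorageSettings settings = new JpaStorageSettings();
		// Expansions that would exceed this cap fail with
		// ExpansionTooCostlyException (Msg.code 885), as in addToSet() above.
		settings.setMaximumExpansionSize(5_000);
		System.out.println(settings.getMaximumExpansionSize());
	}
}
```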
Some files were not shown because too many files have changed in this diff.