Fix date search on period with no end (#2676)
* Add test
* Working test
* Update test
* Account for date fixes
* Add changelog
* Add setting
* Test fixes
* Test fix
Parent: f774653239
Commit: c8c596aff4
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 2676
+title: "A new config option has been added to the DaoConfig that causes generated SQL statements to
+  account for potential null values in HAPI FHIR JPA date index rows. Nulls are no longer ever
+  used in this table after HAPI FHIR 5.3.0, but legacy data may still have nulls."
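For context, enabling the option is a one-line DaoConfig change. The snippet below is a minimal sketch (the setter name comes from this commit; how the DaoConfig bean is wired into your server is assumed and not part of the change):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class DateIndexNullsConfigExample {
  public static DaoConfig newDaoConfig() {
    DaoConfig daoConfig = new DaoConfig();
    // Opt in to null-tolerant date range SQL for legacy index rows (default is false)
    daoConfig.setAccountForDateIndexNulls(true);
    return daoConfig;
  }
}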
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 2676
+title: "A new setting has been added to the DaoConfig that allows the maximum number of
+  `_include` and `_revinclude` resources to be added to a single search page result."
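A minimal sketch of using the new setting (the setter and default constant come from this commit; the surrounding wiring is assumed):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class IncludePageSizeConfigExample {
  public static DaoConfig newDaoConfig() {
    DaoConfig daoConfig = new DaoConfig();
    // Cap _include/_revinclude resources per result page (default is 1000); null means no limit
    daoConfig.setMaximumIncludesToLoadPerPage(500);
    return daoConfig;
  }
}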
@@ -0,0 +1,5 @@
+---
+type: add
+issue: 2676
+title: "When performing non-query cache JPA searches (i.e. searches with `Cache-Control: no-store`)
+  the loading of `_include` and `_revinclude` will now factor the maximum include count."
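For illustration, a client can trigger this code path by sending the no-store directive. This sketch uses the standard HAPI FHIR generic client API, which is an assumption on my part and not something this commit touches:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;

public class NoStoreSearchExample {
  public static Bundle searchWithoutQueryCache(String theServerBase) {
    IGenericClient client = FhirContext.forR4().newRestfulGenericClient(theServerBase);
    // "Cache-Control: no-store" makes the JPA server run a synchronous, non-cached search;
    // with this change the configured include cap is applied to that path as well
    return client.search()
      .forResource("EpisodeOfCare")
      .include(new Include("*"))
      .cacheControl(new CacheControlDirective().setNoStore(true))
      .returnBundle(Bundle.class)
      .execute();
  }
}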
@@ -0,0 +1,6 @@
+---
+type: fix
+issue: 2676
+title: "Subscription notifications will no longer be triggered by default in response to changes
+  that do not increment the resource version (e.g. `$meta-add` and `$meta-delete`). A new
+  DaoConfig setting has been added to make this configurable."
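A minimal sketch of restoring the previous behaviour via the new setting (setter name taken from this commit; bean wiring assumed):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class SubscriptionTriggerConfigExample {
  public static DaoConfig newDaoConfig() {
    DaoConfig daoConfig = new DaoConfig();
    // Also notify subscribers on $meta-add / $meta-delete, even though those operations
    // do not create a new resource version (default is false)
    daoConfig.setTriggerSubscriptionsForNonVersioningChanges(true);
    return daoConfig;
  }
}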
@@ -16,6 +16,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import javax.validation.constraints.Null;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -76,6 +78,12 @@ public class DaoConfig {
   // update setter javadoc if default changes
   public static final int DEFAULT_MAX_EXPANSION_SIZE = 1000;
   public static final HistoryCountModeEnum DEFAULT_HISTORY_COUNT_MODE = HistoryCountModeEnum.CACHED_ONLY_WITHOUT_OFFSET;
+  /**
+   * This constant applies to task enablement, e.g. {@link #setEnableTaskStaleSearchCleanup(boolean)}.
+   * <p>
+   * By default, all are enabled.
+   */
+  public static final boolean DEFAULT_ENABLE_TASKS = true;
   /**
    * Default value for {@link #setMaximumSearchResultCountInTransaction(Integer)}
    *
@@ -86,19 +94,11 @@ public class DaoConfig {
   private static final Logger ourLog = LoggerFactory.getLogger(DaoConfig.class);
   private static final int DEFAULT_EXPUNGE_BATCH_SIZE = 800;
   private static final int DEFAULT_MAXIMUM_DELETE_CONFLICT_COUNT = 60;
-
-  /**
-   * This constant applies to task enablement, e.g. {@link #setEnableTaskStaleSearchCleanup(boolean)}.
-   *
-   * By default, all are enabled.
-   */
-  public static final boolean DEFAULT_ENABLE_TASKS = true;
-
   /**
    * Child Configurations
    */
   private static final Integer DEFAULT_INTERNAL_SYNCHRONOUS_SEARCH_SIZE = 10000;
+  public static final int DEFAULT_MAXIMUM_INCLUDES_TO_LOAD_PER_PAGE = 1000;
   private final ModelConfig myModelConfig = new ModelConfig();
   /**
    * Do not change default of {@code 0}!
@@ -106,6 +106,12 @@ public class DaoConfig {
    * @since 4.1.0
    */
   private final int myPreExpandValueSetsDefaultOffset = 0;
+
+  /**
+   * @since 5.5.0
+   */
+  @Nullable
+  private Integer myMaximumIncludesToLoadPerPage = DEFAULT_MAXIMUM_INCLUDES_TO_LOAD_PER_PAGE;
   private IndexEnabledEnum myIndexMissingFieldsEnabled = IndexEnabledEnum.DISABLED;
   /**
    * update setter javadoc if default changes
@@ -165,7 +171,6 @@ public class DaoConfig {
   private StoreMetaSourceInformationEnum myStoreMetaSourceInformation = StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID;
   private HistoryCountModeEnum myHistoryCountMode = DEFAULT_HISTORY_COUNT_MODE;
   private int myInternalSynchronousSearchSize = DEFAULT_INTERNAL_SYNCHRONOUS_SEARCH_SIZE;
-
   /**
    * update setter javadoc if default changes
    */
@@ -223,7 +228,6 @@ public class DaoConfig {
    * @since 5.5.0
    */
   private boolean myEnableTaskStaleSearchCleanup;
-
   /**
    * @since 5.5.0
    */
@@ -236,6 +240,8 @@ public class DaoConfig {
    * @since 5.5.0
    */
   private boolean myEnableTaskBulkExportJobExecution;
+  private boolean myAccountForDateIndexNulls;
+  private boolean myTriggerSubscriptionsForNonVersioningChanges;
 
   /**
    * Constructor
@@ -259,6 +265,29 @@ public class DaoConfig {
     }
   }
 
+  /**
+   * Specifies the maximum number of <code>_include</code> and <code>_revinclude</code> results to return in a
+   * single page of results. The default is <code>1000</code>, and <code>null</code> may be used
+   * to indicate that there is no limit.
+   *
+   * @since 5.5.0
+   */
+  @Nullable
+  public Integer getMaximumIncludesToLoadPerPage() {
+    return myMaximumIncludesToLoadPerPage;
+  }
+
+  /**
+   * Specifies the maximum number of <code>_include</code> and <code>_revinclude</code> results to return in a
+   * single page of results. The default is <code>1000</code>, and <code>null</code> may be used
+   * to indicate that there is no limit.
+   *
+   * @since 5.5.0
+   */
+  public void setMaximumIncludesToLoadPerPage(@Nullable Integer theMaximumIncludesToLoadPerPage) {
+    myMaximumIncludesToLoadPerPage = theMaximumIncludesToLoadPerPage;
+  }
+
   /**
    * When performing a FHIR history operation, a <code>Bundle.total</code> value is included in the
    * response, indicating the total number of history entries. This response is calculated using a
@@ -2225,6 +2254,7 @@ public class DaoConfig {
    * 1. Delete with _expunge parameter.
    * 2. Searching for Code System IDs by System and Code
    * </p>
+   *
    * @since 5.4.0
    */
   public Integer getInternalSynchronousSearchSize() {
@@ -2237,6 +2267,7 @@ public class DaoConfig {
    * 1. Delete with _expunge parameter.
    * 2. Searching for Code System IDs by System and Code
    * </p>
+   *
    * @since 5.4.0
    */
   public void setInternalSynchronousSearchSize(Integer theInternalSynchronousSearchSize) {
@@ -2270,8 +2301,8 @@ public class DaoConfig {
    *
    * @since 5.5.0
    */
-  public void setEnableTaskBulkExportJobExecution(boolean theEnableTaskBulkExportJobExecution) {
-    myEnableTaskBulkExportJobExecution = theEnableTaskBulkExportJobExecution;
+  public boolean isEnableTaskBulkExportJobExecution() {
+    return myEnableTaskBulkExportJobExecution;
   }
 
   /**
@@ -2280,11 +2311,10 @@ public class DaoConfig {
    *
    * @since 5.5.0
    */
-  public boolean isEnableTaskBulkExportJobExecution() {
-    return myEnableTaskBulkExportJobExecution;
+  public void setEnableTaskBulkExportJobExecution(boolean theEnableTaskBulkExportJobExecution) {
+    myEnableTaskBulkExportJobExecution = theEnableTaskBulkExportJobExecution;
   }
 
-
   /**
    * If this is enabled (this is the default), this server will attempt to pre-expand any ValueSets that
    * have been uploaded and are not yet pre-expanded. Otherwise, this server will not.
@@ -2331,8 +2361,8 @@ public class DaoConfig {
    *
    * @since 5.5.0
    */
-  public void setEnableTaskResourceReindexing(boolean theEnableTaskResourceReindexing) {
-    myEnableTaskResourceReindexing = theEnableTaskResourceReindexing;
+  public boolean isEnableTaskResourceReindexing() {
+    return myEnableTaskResourceReindexing;
   }
 
   /**
@@ -2341,8 +2371,50 @@ public class DaoConfig {
    *
    * @since 5.5.0
    */
-  public boolean isEnableTaskResourceReindexing() {
-    return myEnableTaskResourceReindexing;
+  public void setEnableTaskResourceReindexing(boolean theEnableTaskResourceReindexing) {
+    myEnableTaskResourceReindexing = theEnableTaskResourceReindexing;
+  }
+
+  /**
+   * If set to true (default is false), date indexes will account for null values in the range columns. As of 5.3.0
+   * we no longer place null values in these columns, but legacy data may exist that still has these values. Note that
+   * enabling this results in more complexity in the search SQL.
+   *
+   * @since 5.5.0
+   */
+  public void setAccountForDateIndexNulls(boolean theAccountForDateIndexNulls) {
+    myAccountForDateIndexNulls = theAccountForDateIndexNulls;
+  }
+
+  /**
+   * If set to true (default is false), date indexes will account for null values in the range columns. As of 5.3.0
+   * we no longer place null values in these columns, but legacy data may exist that still has these values. Note that
+   * enabling this results in more complexity in the search SQL.
+   *
+   * @since 5.5.0
+   */
+  public boolean isAccountForDateIndexNulls() {
+    return myAccountForDateIndexNulls;
+  }
+
+  /**
+   * If set to true (default is false) then subscriptions will be triggered for resource updates even if they
+   * do not trigger a new version (e.g. $meta-add and $meta-delete).
+   *
+   * @since 5.5.0
+   */
+  public boolean isTriggerSubscriptionsForNonVersioningChanges() {
+    return myTriggerSubscriptionsForNonVersioningChanges;
+  }
+
+  /**
+   * If set to true (default is false) then subscriptions will be triggered for resource updates even if they
+   * do not trigger a new version (e.g. $meta-add and $meta-delete).
+   *
+   * @since 5.5.0
+   */
+  public void setTriggerSubscriptionsForNonVersioningChanges(boolean theTriggerSubscriptionsForNonVersioningChanges) {
+    myTriggerSubscriptionsForNonVersioningChanges = theTriggerSubscriptionsForNonVersioningChanges;
   }
 
   public enum StoreMetaSourceInformationEnum {

@@ -34,7 +34,6 @@ import javax.annotation.Nonnull;
 import javax.persistence.EntityManager;
 import java.util.Collection;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
@@ -49,7 +48,7 @@ public interface ISearchBuilder {
   void loadResourcesByPid(Collection<ResourcePersistentId> thePids, Collection<ResourcePersistentId> theIncludedPids, List<IBaseResource> theResourceListToPopulate, boolean theForHistoryOperation, RequestDetails theDetails);
 
   Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
-    DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest);
+    DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount);
 
   /**
    * How many results may be fetched at once

@@ -688,7 +688,7 @@ public class LegacySearchBuilder implements ISearchBuilder {
    */
   @Override
   public HashSet<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Set<Include> theRevIncludes,
-    boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest) {
+    boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount) {
     if (theMatches.size() == 0) {
       return new HashSet<>();
     }
@@ -1037,7 +1037,7 @@ public class LegacySearchBuilder implements ISearchBuilder {
         }
 
         Set<Include> includes = Collections.singleton(new Include("*", true));
-        Set<ResourcePersistentId> newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest);
+        Set<ResourcePersistentId> newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest, null);
         if (newPids.isEmpty()) {
           myNext = NO_MORE;
           break;

@@ -199,9 +199,15 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
       if (lowerBoundInstant != null) {
         // the value has been reduced one in this case
         lb = theBuilder.lessThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          lb = theBuilder.or(lb, theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericLowerBound));
+        }
       } else {
         if (upperBoundInstant != null) {
           ub = theBuilder.lessThanOrEqualTo(theFrom.get(lowValueField), genericUpperBound);
+          if (myDaoConfig.isAccountForDateIndexNulls()) {
+            ub = theBuilder.or(ub, theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericUpperBound));
+          }
         } else {
           throw new InvalidRequestException("lowerBound and upperBound value not correctly specified for compare theOperation");
         }
@@ -210,9 +216,15 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
       // use lower bound first
       if (lowerBoundInstant != null) {
         lb = theBuilder.lessThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          lb = theBuilder.or(lb, theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericLowerBound));
+        }
       } else {
         if (upperBoundInstant != null) {
           ub = theBuilder.lessThanOrEqualTo(theFrom.get(lowValueField), genericUpperBound);
+          if (myDaoConfig.isAccountForDateIndexNulls()) {
+            ub = theBuilder.or(ub, theBuilder.lessThanOrEqualTo(theFrom.get(highValueField), genericUpperBound));
+          }
         } else {
           throw new InvalidRequestException("lowerBound and upperBound value not correctly specified for compare theOperation");
         }
@@ -222,9 +234,15 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
       // gt7 true, 10>7, gt11 false, 10>11 false, gt5 true, 10>5
       if (upperBoundInstant != null) {
         ub = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericUpperBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          ub = theBuilder.or(ub, theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericUpperBound));
+        }
       } else {
         if (lowerBoundInstant != null) {
           lb = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericLowerBound);
+          if (myDaoConfig.isAccountForDateIndexNulls()) {
+            lb = theBuilder.or(lb, theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound));
+          }
         } else {
           throw new InvalidRequestException("upperBound and lowerBound value not correctly specified for compare theOperation");
         }
@@ -233,10 +251,16 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
       // use upper bound first, e.g value between 6 and 10
       // gt7 true, 10>7, gt11 false, 10>11 false, gt5 true, 10>5
       if (upperBoundInstant != null) {
-        ub = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericUpperBound);;
+        ub = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericUpperBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          ub = theBuilder.or(ub, theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericUpperBound));
+        }
       } else {
         if (lowerBoundInstant != null) {
           lb = theBuilder.greaterThanOrEqualTo(theFrom.get(highValueField), genericLowerBound);
+          if (myDaoConfig.isAccountForDateIndexNulls()) {
+            lb = theBuilder.or(lb, theBuilder.greaterThanOrEqualTo(theFrom.get(lowValueField), genericLowerBound));
+          }
         } else {
           throw new InvalidRequestException("upperBound and lowerBound value not correctly specified for compare theOperation");
         }

@@ -411,8 +411,12 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
     Set<ResourcePersistentId> includedPids = new HashSet<>();
 
     if (mySearchEntity.getSearchType() == SearchTypeEnum.SEARCH) {
-      includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest));
-      includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest));
+      Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
+      includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes));
+      if (maxIncludes != null) {
+        maxIncludes -= includedPids.size();
+      }
+      includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes));
     }
 
     List<ResourcePersistentId> includedPidList = new ArrayList<>(includedPids);
@@ -441,6 +445,11 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
     myDaoRegistry = theDaoRegistry;
   }
 
+  @VisibleForTesting
+  public void setDaoConfigForUnitTest(DaoConfig theDaoConfig) {
+    myDaoConfig = theDaoConfig;
+  }
+
   @VisibleForTesting
   public void setSearchBuilderFactoryForUnitTest(SearchBuilderFactory theSearchBuilderFactory) {
     mySearchBuilderFactory = theSearchBuilderFactory;

@@ -532,11 +532,16 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
        * On the other hand for async queries we load includes/revincludes
        * individually for pages as we return them to clients
        */
-      final Set<ResourcePersistentId> includedPids = new HashSet<>();
-
-      includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
-      if (theParams.getEverythingMode() == null) {
-        includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails));
+      Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
+      final Set<ResourcePersistentId> includedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes);
+
+      if (maxIncludes != null) {
+        maxIncludes -= includedPids.size();
+      }
+
+      if (theParams.getEverythingMode() == null && (maxIncludes == null || maxIncludes > 0)) {
+        includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes));
       }
 
       List<ResourcePersistentId> includedPidsList = new ArrayList<>(includedPids);

@@ -115,7 +115,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -757,7 +756,7 @@ public class SearchBuilder implements ISearchBuilder {
    */
   @Override
   public Set<ResourcePersistentId> loadIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<ResourcePersistentId> theMatches, Set<Include> theIncludes,
-    boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest) {
+    boolean theReverseMode, DateRangeParam theLastUpdated, String theSearchIdOrDescription, RequestDetails theRequest, Integer theMaxCount) {
     if (theMatches.size() == 0) {
       return new HashSet<>();
     }
@@ -833,6 +832,9 @@ public class SearchBuilder implements ISearchBuilder {
             if (wantResourceType != null) {
               q.setParameter("want_resource_type", wantResourceType);
             }
+            if (theMaxCount != null) {
+              q.setMaxResults(theMaxCount);
+            }
             List<?> results = q.getResultList();
             for (Object nextRow : results) {
               if (nextRow == null) {
@@ -909,6 +911,9 @@ public class SearchBuilder implements ISearchBuilder {
               q.setParameter("target_resource_types", param.getTargets());
             }
             List<?> results = q.getResultList();
+            if (theMaxCount != null) {
+              q.setMaxResults(theMaxCount);
+            }
             for (Object resourceLink : results) {
               if (resourceLink != null) {
                 ResourcePersistentId persistentId;
@@ -941,6 +946,11 @@ public class SearchBuilder implements ISearchBuilder {
       }
 
       addedSomeThisRound = allAdded.addAll(pidsToInclude);
+
+      if (theMaxCount != null && allAdded.size() >= theMaxCount) {
+        break;
+      }
+
     } while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);
 
     allAdded.removeAll(original);
@@ -1124,7 +1134,7 @@ public class SearchBuilder implements ISearchBuilder {
 
       if (myCurrentIterator == null) {
         Set<Include> includes = Collections.singleton(new Include("*", true));
-        Set<ResourcePersistentId> newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest);
+        Set<ResourcePersistentId> newPids = loadIncludes(myContext, myEntityManager, myCurrentPids, includes, false, getParams().getLastUpdated(), mySearchUuid, myRequest, null);
         myCurrentIterator = newPids.iterator();
       }
 

@@ -136,8 +136,14 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
       // use lower bound first
       if (lowerBoundInstant != null) {
         lb = theFrom.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          lb = ComboCondition.or(lb, theFrom.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound));
+        }
       } else if (upperBoundInstant != null) {
         ub = theFrom.createPredicate(lowValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericUpperBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          ub = ComboCondition.or(ub, theFrom.createPredicate(highValueField, ParamPrefixEnum.LESSTHAN_OR_EQUALS, genericLowerBound));
+        }
       } else {
         throw new InvalidRequestException("lowerBound and upperBound value not correctly specified for comparing " + theOperation);
       }
@@ -145,8 +151,14 @@ public class DatePredicateBuilder extends BaseSearchParamPredicateBuilder {
       // use upper bound first, e.g value between 6 and 10
       if (upperBoundInstant != null) {
         ub = theFrom.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericUpperBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          ub = ComboCondition.or(ub, theFrom.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound));
+        }
       } else if (lowerBoundInstant != null) {
         lb = theFrom.createPredicate(highValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound);
+        if (myDaoConfig.isAccountForDateIndexNulls()) {
+          lb = ComboCondition.or(lb, theFrom.createPredicate(lowValueField, ParamPrefixEnum.GREATERTHAN_OR_EQUALS, genericLowerBound));
+        }
       } else {
         throw new InvalidRequestException("upperBound and lowerBound value not correctly specified for compare theOperation");
       }

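To make the intent of the two predicate-builder changes above concrete: when the new setting is on, a date comparison that would normally look only at the HIGH column of the index row also accepts rows whose HIGH column is null but whose LOW column qualifies. The sketch below is illustrative only (the class, method, and field paths are hypothetical; it uses plain JPA Criteria rather than the HAPI builders) and shows the predicate shape for a ">= lowerBound" comparison under that assumption:

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import java.util.Date;

public class NullTolerantDatePredicateSketch {
  public static Predicate greaterThanOrEqual(CriteriaBuilder theBuilder,
                                             Path<Date> theLowValue,
                                             Path<Date> theHighValue,
                                             Date theLowerBound,
                                             boolean theAccountForDateIndexNulls) {
    // With a complete range it is enough to compare the HIGH column against the lower bound
    Predicate predicate = theBuilder.greaterThanOrEqualTo(theHighValue, theLowerBound);
    if (theAccountForDateIndexNulls) {
      // Legacy rows may have a null HIGH column, so also accept rows whose LOW column qualifies
      predicate = theBuilder.or(predicate, theBuilder.greaterThanOrEqualTo(theLowValue, theLowerBound));
    }
    return predicate;
  }
}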
@@ -11,6 +11,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
 import ca.uhn.fhir.jpa.config.BaseConfig;
+import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
 import ca.uhn.fhir.jpa.dao.index.IdHelperService;
@@ -151,6 +152,8 @@ public abstract class BaseJpaTest extends BaseTest {
   @Autowired
   protected IResourceIndexedSearchParamTokenDao myResourceIndexedSearchParamTokenDao;
   @Autowired
+  protected IResourceIndexedSearchParamDateDao myResourceIndexedSearchParamDateDao;
+  @Autowired
   private IdHelperService myIdHelperService;
   @Autowired
   private MemoryCacheService myMemoryCacheService;
@@ -239,6 +242,12 @@ public abstract class BaseJpaTest extends BaseTest {
     });
   }
 
+  protected void logAllDateIndexes() {
+    runInTransaction(() -> {
+      ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
+    });
+  }
+
   protected void logAllTokenIndexes() {
     runInTransaction(() -> {
       ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));

@ -5,7 +5,6 @@ import ca.uhn.fhir.interceptor.api.HookParams;
|
||||||
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
|
||||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
|
||||||
import ca.uhn.fhir.jpa.entity.Search;
|
import ca.uhn.fhir.jpa.entity.Search;
|
||||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||||
|
@ -88,7 +87,6 @@ import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
|
||||||
import org.hl7.fhir.r4.model.EpisodeOfCare;
|
import org.hl7.fhir.r4.model.EpisodeOfCare;
|
||||||
import org.hl7.fhir.r4.model.Group;
|
import org.hl7.fhir.r4.model.Group;
|
||||||
import org.hl7.fhir.r4.model.IdType;
|
import org.hl7.fhir.r4.model.IdType;
|
||||||
import org.hl7.fhir.r4.model.Immunization;
|
|
||||||
import org.hl7.fhir.r4.model.InstantType;
|
import org.hl7.fhir.r4.model.InstantType;
|
||||||
import org.hl7.fhir.r4.model.IntegerType;
|
import org.hl7.fhir.r4.model.IntegerType;
|
||||||
import org.hl7.fhir.r4.model.Location;
|
import org.hl7.fhir.r4.model.Location;
|
||||||
|
@ -102,6 +100,7 @@ import org.hl7.fhir.r4.model.Organization;
|
||||||
import org.hl7.fhir.r4.model.Patient;
|
import org.hl7.fhir.r4.model.Patient;
|
||||||
import org.hl7.fhir.r4.model.Period;
|
import org.hl7.fhir.r4.model.Period;
|
||||||
import org.hl7.fhir.r4.model.Practitioner;
|
import org.hl7.fhir.r4.model.Practitioner;
|
||||||
|
import org.hl7.fhir.r4.model.Procedure;
|
||||||
import org.hl7.fhir.r4.model.Provenance;
|
import org.hl7.fhir.r4.model.Provenance;
|
||||||
import org.hl7.fhir.r4.model.Quantity;
|
import org.hl7.fhir.r4.model.Quantity;
|
||||||
import org.hl7.fhir.r4.model.Questionnaire;
|
import org.hl7.fhir.r4.model.Questionnaire;
|
||||||
|
@ -182,6 +181,7 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
|
||||||
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
|
myDaoConfig.setSearchPreFetchThresholds(new DaoConfig().getSearchPreFetchThresholds());
|
||||||
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
|
||||||
myDaoConfig.setUseLegacySearchBuilder(false);
|
myDaoConfig.setUseLegacySearchBuilder(false);
|
||||||
|
myDaoConfig.setAccountForDateIndexNulls(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
@BeforeEach
|
@BeforeEach
|
||||||
|
@ -578,6 +578,188 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
|
||||||
assertThat(ids, contains(moId.getValue()));
|
assertThat(ids, contains(moId.getValue()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simulate data that had been loaded in before we started adding placeholder dates to complete the range
|
||||||
|
*/
|
||||||
|
@Test
|
||||||
|
public void testDateOnPeriod_NoEnd_NoMaxValueInDatabase_NonLegacySearchBuilder() {
|
||||||
|
myDaoConfig.setUseLegacySearchBuilder(false);
|
||||||
|
myDaoConfig.setAccountForDateIndexNulls(true);
|
||||||
|
|
||||||
|
Patient pt = new Patient();
|
||||||
|
pt.setId("PT");
|
||||||
|
pt.setActive(true);
|
||||||
|
myPatientDao.update(pt);
|
||||||
|
|
||||||
|
// Should match
|
||||||
|
Procedure proc = new Procedure();
|
||||||
|
proc.setId("A");
|
||||||
|
proc.setSubject(new Reference("Patient/PT"));
|
||||||
|
proc.setPerformed(new Period().setStartElement(new DateTimeType("2021-03-16T23:50:06-04:00")));
|
||||||
|
myProcedureDao.update(proc);
|
||||||
|
|
||||||
|
// Shouldn't match
|
||||||
|
proc = new Procedure();
|
||||||
|
proc.setId("B");
|
||||||
|
proc.setSubject(new Reference("Patient/PT"));
|
||||||
|
proc.setPerformed(new Period().setStartElement(new DateTimeType("2021-12-31T23:50:06-04:00")));
|
||||||
|
myProcedureDao.update(proc);
|
||||||
|
|
||||||
|
logAllDateIndexes();
|
||||||
|
|
||||||
|
runInTransaction(() -> {
|
||||||
|
myEntityManager.createQuery("UPDATE ResourceIndexedSearchParamDate d SET d.myValueHigh = NULL").executeUpdate();
|
||||||
|
myEntityManager.createQuery("UPDATE ResourceIndexedSearchParamDate d SET d.myValueHighDateOrdinal = NULL").executeUpdate();
|
||||||
|
});
|
||||||
|
|
||||||
|
logAllDateIndexes();
|
||||||
|
|
||||||
|
SearchParameterMap map;
|
||||||
|
IBundleProvider outcome;
|
||||||
|
List<String> ids;
|
||||||
|
|
||||||
|
// >= date / <= date
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2021-03-15"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, "2021-03-18"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// > date / < date
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN, "2021-03-15"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN, "2021-03-18"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// >= datetime / <= datetime
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2021-03-15T00:11:22Z"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, "2021-03-18T00:11:22Z"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// > datetime / < datetime
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN, "2021-03-15T00:11:22Z"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN, "2021-03-18T00:11:22Z"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simulate data that had been loaded in before we started adding placeholder dates to complete the range
|
||||||
|
*/
|
||||||
|
@Test
|
||||||
|
public void testDateOnPeriod_NoEnd_NoMaxValueInDatabase_LegacySearchBuilder() {
|
||||||
|
myDaoConfig.setUseLegacySearchBuilder(true);
|
||||||
|
myDaoConfig.setAccountForDateIndexNulls(true);
|
||||||
|
|
||||||
|
Patient pt = new Patient();
|
||||||
|
pt.setId("PT");
|
||||||
|
pt.setActive(true);
|
||||||
|
myPatientDao.update(pt);
|
||||||
|
|
||||||
|
// Should match
|
||||||
|
Procedure proc = new Procedure();
|
||||||
|
proc.setId("A");
|
||||||
|
proc.setSubject(new Reference("Patient/PT"));
|
||||||
|
proc.setPerformed(new Period().setStartElement(new DateTimeType("2021-03-16T23:50:06-04:00")));
|
||||||
|
myProcedureDao.update(proc);
|
||||||
|
|
||||||
|
// Shouldn't match
|
||||||
|
proc = new Procedure();
|
||||||
|
proc.setId("B");
|
||||||
|
proc.setSubject(new Reference("Patient/PT"));
|
||||||
|
proc.setPerformed(new Period().setStartElement(new DateTimeType("2021-12-31T23:50:06-04:00")));
|
||||||
|
myProcedureDao.update(proc);
|
||||||
|
|
||||||
|
logAllDateIndexes();
|
||||||
|
|
||||||
|
runInTransaction(() -> {
|
||||||
|
myEntityManager.createQuery("UPDATE ResourceIndexedSearchParamDate d SET d.myValueHigh = NULL").executeUpdate();
|
||||||
|
myEntityManager.createQuery("UPDATE ResourceIndexedSearchParamDate d SET d.myValueHighDateOrdinal = NULL").executeUpdate();
|
||||||
|
});
|
||||||
|
|
||||||
|
logAllDateIndexes();
|
||||||
|
|
||||||
|
SearchParameterMap map;
|
||||||
|
IBundleProvider outcome;
|
||||||
|
List<String> ids;
|
||||||
|
|
||||||
|
// >= date / <= date
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2021-03-15"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, "2021-03-18"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// > date / < date
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN, "2021-03-15"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN, "2021-03-18"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// >= datetime / <= datetime
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN_OR_EQUALS, "2021-03-15T00:11:22Z"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN_OR_EQUALS, "2021-03-18T00:11:22Z"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
|
||||||
|
// > datetime / < datetime
|
||||||
|
map = SearchParameterMap.newSynchronous()
|
||||||
|
.add("_id", new TokenParam("A"))
|
||||||
|
.add("patient", new ReferenceParam("PT"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.GREATERTHAN, "2021-03-15T00:11:22Z"))
|
||||||
|
.add("date", new DateParam(ParamPrefixEnum.LESSTHAN, "2021-03-18T00:11:22Z"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
outcome = myProcedureDao.search(map);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(outcome);
|
||||||
|
assertThat(ids, contains("Procedure/A"));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testEmptyChain() {
|
public void testEmptyChain() {
|
||||||
|
|
||||||
|
|
|
@@ -0,0 +1,129 @@
+package ca.uhn.fhir.jpa.dao.r4;
+
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.api.Include;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.param.TokenParam;
+import org.hamcrest.Matcher;
+import org.hamcrest.collection.IsIterableContainingInAnyOrder;
+import org.hl7.fhir.r4.model.EpisodeOfCare;
+import org.hl7.fhir.r4.model.Organization;
+import org.hl7.fhir.r4.model.Reference;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+
+@SuppressWarnings({"unchecked", "Duplicates"})
+public class FhirResourceDaoR4SearchIncludeTest extends BaseJpaR4Test {
+  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SearchIncludeTest.class);
+
+  @AfterEach
+  public void afterEach() {
+    myDaoConfig.setMaximumIncludesToLoadPerPage(DaoConfig.DEFAULT_MAXIMUM_INCLUDES_TO_LOAD_PER_PAGE);
+  }
+
+  @Test
+  public void testIncludesNotAppliedToIncludedResources() {
+    createOrganizationWithReferencingEpisodesOfCare(10);
+
+    SearchParameterMap map = SearchParameterMap.newSynchronous()
+      .add("_id", new TokenParam("EOC-0"))
+      .addInclude(new Include("*"))
+      .addRevInclude(new Include("*").setRecurse(true));
+    IBundleProvider results = myEpisodeOfCareDao.search(map);
+    List<String> ids = toUnqualifiedVersionlessIdValues(results);
+    assertThat(ids.toString(), ids, containsInAnyOrder("EpisodeOfCare/EOC-0", "Organization/ORG-0"));
+  }
+
+  @Test
+  public void testRevIncludesPaged_SyncSearchWithCount() {
+    createOrganizationWithReferencingEpisodesOfCare(10);
+
+    SearchParameterMap map = SearchParameterMap.newSynchronous()
+      .setCount(10)
+      .addInclude(new Include("*"))
+      .addRevInclude(new Include("*").setRecurse(true));
+    IBundleProvider results = myOrganizationDao.search(map);
+    List<String> ids = toUnqualifiedVersionlessIdValues(results);
+    Collection<Matcher<String>> expected = IntStream.range(0, 10).mapToObj(t -> equalTo("EpisodeOfCare/EOC-" + t)).collect(Collectors.toList());
+    expected.add(equalTo("Organization/ORG-0"));
+    expected.add(equalTo("Organization/ORG-P"));
+    assertThat(ids.toString(), ids, new IsIterableContainingInAnyOrder(expected));
+  }
+
+  @Test
+  public void testRevIncludesPaged_SyncSearchWithoutCount() {
+    createOrganizationWithReferencingEpisodesOfCare(10);
+    myDaoConfig.setMaximumIncludesToLoadPerPage(5);
+
+    logAllResourceLinks();
+
+    SearchParameterMap map = SearchParameterMap.newSynchronous()
+      .add("_id", new TokenParam("ORG-0"))
+      .addInclude(new Include("*"))
+      .addRevInclude(new Include("*").setRecurse(true));
+    IBundleProvider results = myOrganizationDao.search(map);
+    List<String> ids = toUnqualifiedVersionlessIdValues(results);
+    assertThat(ids.toString(), ids, containsInAnyOrder(
+      "EpisodeOfCare/EOC-0",
+      "EpisodeOfCare/EOC-1",
+      "EpisodeOfCare/EOC-2",
+      "EpisodeOfCare/EOC-3",
+      "EpisodeOfCare/EOC-4",
+      "Organization/ORG-0"
+    ));
+  }
+
+  @Test
+  public void testRevIncludesPaged_AsyncSearch() {
+    int eocCount = 10;
+    myDaoConfig.setMaximumIncludesToLoadPerPage(5);
+
+    createOrganizationWithReferencingEpisodesOfCare(eocCount);
+
+    SearchParameterMap map = new SearchParameterMap()
+      .setCount(10)
+      .addInclude(new Include("*"))
+      .addRevInclude(new Include("*").setRecurse(true));
+    IBundleProvider results = myOrganizationDao.search(map);
+    List<String> ids = toUnqualifiedVersionlessIdValues(results);
+    assertThat(ids.toString(), ids, containsInAnyOrder(
+      "EpisodeOfCare/EOC-0",
+      "EpisodeOfCare/EOC-1",
+      "EpisodeOfCare/EOC-2",
+      "EpisodeOfCare/EOC-3",
+      "Organization/ORG-0",
+      "Organization/ORG-P"
+    ));
+
+  }
+
+  private void createOrganizationWithReferencingEpisodesOfCare(int theEocCount) {
+    Organization org = new Organization();
+    org.setId("Organization/ORG-P");
+    org.setName("ORG-P");
+    myOrganizationDao.update(org);
+
+    org = new Organization();
+    org.setId("Organization/ORG-0");
+    org.setName("ORG-0");
+    org.setPartOf(new Reference("Organization/ORG-P"));
+    myOrganizationDao.update(org);
+
+    for (int i = 0; i < theEocCount; i++) {
+      EpisodeOfCare eoc = new EpisodeOfCare();
+      eoc.setId("EpisodeOfCare/EOC-" + i);
+      eoc.getManagingOrganization().setReference("Organization/ORG-0");
+      myEpisodeOfCareDao.update(eoc);
+    }
+  }
+}

@@ -104,6 +104,7 @@ import org.hl7.fhir.r4.model.Organization;
 import org.hl7.fhir.r4.model.Patient;
 import org.hl7.fhir.r4.model.Period;
 import org.hl7.fhir.r4.model.Practitioner;
+import org.hl7.fhir.r4.model.Procedure;
 import org.hl7.fhir.r4.model.Provenance;
 import org.hl7.fhir.r4.model.Quantity;
 import org.hl7.fhir.r4.model.Questionnaire;

@ -320,6 +320,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
SearchCoordinatorSvcImpl.SearchTask searchTask = t.getArgument(2, SearchCoordinatorSvcImpl.SearchTask.class);
|
SearchCoordinatorSvcImpl.SearchTask searchTask = t.getArgument(2, SearchCoordinatorSvcImpl.SearchTask.class);
|
||||||
ISearchBuilder searchBuilder = t.getArgument(3, ISearchBuilder.class);
|
ISearchBuilder searchBuilder = t.getArgument(3, ISearchBuilder.class);
|
||||||
PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails);
|
PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails);
|
||||||
|
retVal.setDaoConfigForUnitTest(new DaoConfig());
|
||||||
retVal.setTxManagerForUnitTest(myTxManager);
|
retVal.setTxManagerForUnitTest(myTxManager);
|
||||||
retVal.setSearchCoordinatorSvcForUnitTest(mySvc);
|
retVal.setSearchCoordinatorSvcForUnitTest(mySvc);
|
||||||
return retVal;
|
return retVal;
|
||||||
|
@ -509,6 +510,7 @@ public class SearchCoordinatorSvcImplTest {
|
||||||
provider.setDaoRegistryForUnitTest(myDaoRegistry);
|
provider.setDaoRegistryForUnitTest(myDaoRegistry);
|
||||||
provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory);
|
provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory);
|
||||||
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
|
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
|
||||||
|
provider.setDaoConfigForUnitTest(new DaoConfig());
|
||||||
resources = provider.getResources(20, 40);
|
resources = provider.getResources(20, 40);
|
||||||
assertEquals(20, resources.size());
|
assertEquals(20, resources.size());
|
||||||
assertEquals("30", resources.get(0).getIdElement().getValueAsString());
|
assertEquals("30", resources.get(0).getIdElement().getValueAsString());
|
||||||
|
@@ -527,6 +529,7 @@ public class SearchCoordinatorSvcImplTest {
    provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory);
    provider.setDaoRegistryForUnitTest(myDaoRegistry);
    provider.setSearchCoordinatorSvcForUnitTest(mySvc);
+   provider.setDaoConfigForUnitTest(new DaoConfig());
    return provider;
 }
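The three hunks above all wire a default DaoConfig into the provider under test; any other test that builds one of these bundle providers by hand now needs the same call before paging through results. A minimal sketch of the pattern, with the provider construction elided and the getResources arguments purely illustrative:

    // Sketch only: supply a DaoConfig before paging, otherwise the provider's
    // new config lookups have nothing to read from. Defaults are fine for tests.
    provider.setDaoConfigForUnitTest(new DaoConfig());
    List<IBaseResource> resources = provider.getResources(0, 20);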
@@ -1,5 +1,6 @@
 package ca.uhn.fhir.jpa.subscription.resthook;

+import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.config.StoppableSubscriptionDeliveringRestHookSubscriber;
 import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test;
 import ca.uhn.fhir.rest.api.CacheControlDirective;
@@ -48,6 +49,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
    ourLog.info("@AfterEach");
    myStoppableSubscriptionDeliveringRestHookSubscriber.setCountDownLatch(null);
    myStoppableSubscriptionDeliveringRestHookSubscriber.unPause();
+   myDaoConfig.setTriggerSubscriptionsForNonVersioningChanges(new DaoConfig().isTriggerSubscriptionsForNonVersioningChanges());
 }

 @Test
@@ -279,6 +281,108 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
       }
    }

+   @Test
+   public void testRestHookSubscriptionMetaAddDoesntTriggerNewDelivery() throws Exception {
+      String payload = "application/fhir+json";
+
+      String code = "1000000050";
+      String criteria1 = "Observation?code=SNOMED-CT|" + code + "&_format=xml";
+      String criteria2 = "Observation?code=SNOMED-CT|" + code + "111&_format=xml";
+
+      createSubscription(criteria1, payload);
+      createSubscription(criteria2, payload);
+      waitForActivatedSubscriptionCount(2);
+
+      Observation obs = sendObservation(code, "SNOMED-CT");
+
+      // Should see 1 subscription notification
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(1, ourUpdatedObservations);
+      assertEquals(Constants.CT_FHIR_JSON_NEW, ourContentTypes.get(0));
+
+      // Send a meta-add
+      obs.setId(obs.getIdElement().toUnqualifiedVersionless());
+      myClient.meta().add().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();
+
+      obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute();
+      Coding tag = obs.getMeta().getTag("http://blah", "blah");
+      assertNotNull(tag);
+
+      // Should be no further deliveries
+      Thread.sleep(1000);
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(1, ourUpdatedObservations);
+
+      // Send a meta-delete
+      obs.setId(obs.getIdElement().toUnqualifiedVersionless());
+      myClient.meta().delete().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();
+
+      obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute();
+      tag = obs.getMeta().getTag("http://blah", "blah");
+      assertNull(tag);
+
+      // Should be no further deliveries
+      Thread.sleep(1000);
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(1, ourUpdatedObservations);
+   }
+
+   @Test
+   public void testRestHookSubscriptionMetaAddDoesTriggerNewDeliveryIfConfiguredToDoSo() throws Exception {
+      myDaoConfig.setTriggerSubscriptionsForNonVersioningChanges(true);
+
+      String payload = "application/fhir+json";
+
+      String code = "1000000050";
+      String criteria1 = "Observation?code=SNOMED-CT|" + code + "&_format=xml";
+      String criteria2 = "Observation?code=SNOMED-CT|" + code + "111&_format=xml";
+
+      createSubscription(criteria1, payload);
+      createSubscription(criteria2, payload);
+      waitForActivatedSubscriptionCount(2);
+
+      Observation obs = sendObservation(code, "SNOMED-CT");
+
+      // Should see 1 subscription notification
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(1, ourUpdatedObservations);
+      assertEquals(Constants.CT_FHIR_JSON_NEW, ourContentTypes.get(0));
+
+      // Send a meta-add
+      obs.setId(obs.getIdElement().toUnqualifiedVersionless());
+      myClient.meta().add().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();
+
+      obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute();
+      Coding tag = obs.getMeta().getTag("http://blah", "blah");
+      assertNotNull(tag);
+
+      // Should see two additional deliveries now that non-versioning changes trigger subscriptions
+      Thread.sleep(1000);
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(3, ourUpdatedObservations);
+
+      // Send a meta-delete
+      obs.setId(obs.getIdElement().toUnqualifiedVersionless());
+      myClient.meta().delete().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();
+
+      obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute();
+      tag = obs.getMeta().getTag("http://blah", "blah");
+      assertNull(tag);
+
+      // Should see two further deliveries for the meta-delete
+      Thread.sleep(1000);
+      waitForQueueToDrain();
+      waitForSize(0, ourCreatedObservations);
+      waitForSize(5, ourUpdatedObservations);
+   }
+
    @Test
    public void testRestHookSubscriptionNoopUpdateDoesntTriggerNewDelivery() throws Exception {
       String payload = "application/fhir+json";
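The two new tests above pin down both sides of the new behaviour: by default a `$meta-add` or `$meta-delete` does not produce a subscription delivery, and the new DaoConfig flag opts back in. A minimal sketch of how a downstream server configuration might opt in; the Spring `@Bean` wrapper is illustrative and only `setTriggerSubscriptionsForNonVersioningChanges` comes from this changeset:

    @Bean
    public DaoConfig daoConfig() {
       DaoConfig daoConfig = new DaoConfig();
       // Deliver subscription notifications even for changes that do not bump the
       // resource version, e.g. $meta-add and $meta-delete (default is off).
       daoConfig.setTriggerSubscriptionsForNonVersioningChanges(true);
       return daoConfig;
    }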
@@ -210,8 +210,9 @@ public class SearchParameterMap implements Serializable {
       return myCount;
    }

-   public void setCount(Integer theCount) {
+   public SearchParameterMap setCount(Integer theCount) {
       myCount = theCount;
+      return this;
    }

    public Integer getOffset() {
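With the changed return type, setCount() can be used inline when building a search map. A small usage sketch; only the setter itself comes from the hunk above, the surrounding construction is illustrative:

    SearchParameterMap map = new SearchParameterMap();
    map.setCount(100);                          // still usable as a plain setter
    SearchParameterMap same = map.setCount(50); // now also returns the map, so calls can be chained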
@@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.Interceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannel;
 import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
 import ca.uhn.fhir.jpa.subscription.match.matcher.matching.IResourceModifiedConsumer;

@@ -26,6 +27,8 @@ import org.springframework.messaging.MessageChannel;
 import org.springframework.transaction.support.TransactionSynchronizationAdapter;
 import org.springframework.transaction.support.TransactionSynchronizationManager;

+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
 /*-
  * #%L
  * HAPI FHIR Subscription Server
@@ -55,6 +58,8 @@ public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer
    private IInterceptorBroadcaster myInterceptorBroadcaster;
    @Autowired
    private SubscriptionChannelFactory mySubscriptionChannelFactory;
+   @Autowired
+   private DaoConfig myDaoConfig;

    private volatile MessageChannel myMatchingChannel;
@@ -87,6 +92,16 @@ public class SubscriptionMatcherInterceptor implements IResourceModifiedConsumer
    @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)
    public void resourceUpdated(IBaseResource theOldResource, IBaseResource theNewResource, RequestDetails theRequest) {
       startIfNeeded();
+      if (!myDaoConfig.isTriggerSubscriptionsForNonVersioningChanges()) {
+         if (theOldResource != null && theNewResource != null) {
+            String oldVersion = theOldResource.getIdElement().getVersionIdPart();
+            String newVersion = theNewResource.getIdElement().getVersionIdPart();
+            if (isNotBlank(oldVersion) && isNotBlank(newVersion) && oldVersion.equals(newVersion)) {
+               return;
+            }
+         }
+      }
+
       submitResourceModified(theNewResource, ResourceModifiedMessage.OperationTypeEnum.UPDATE, theRequest);
    }
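The guard above skips the matching channel whenever an update did not change the resource version and the new flag is left at its default. A small illustration with hypothetical values; `daoConfig` stands in for the autowired `myDaoConfig`, and only the flag accessor and the isNotBlank/equals check come from the hunk itself:

    // A $meta-add leaves the version untouched, so old and new version IDs match.
    String oldVersion = "2";
    String newVersion = "2";
    boolean suppressed = !daoConfig.isTriggerSubscriptionsForNonVersioningChanges()
          && isNotBlank(oldVersion) && isNotBlank(newVersion)
          && oldVersion.equals(newVersion);
    // suppressed == true with the default config: no ResourceModifiedMessage is submitted.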