Merge branch 'master' into issue-2901-npe-in-bundle-transaction

This commit is contained in:
Tadgh 2021-08-23 12:05:31 -04:00
commit ee9c630228
24 changed files with 672 additions and 104 deletions

View File

@ -3,4 +3,6 @@ type: add
issue: 2676
backport: 5.4.1
title: "A new setting has been added to the DaoConfig that allows the maximum number of
`_include` and `_revinclude` resources to be added to a single search page result."
`_include` and `_revinclude` resources to be added to a single search page result. In addition, the
include/revinclude processors have been redesigned to avoid accidentally overloading the server if
an include/revinclude would return an unexpectedly massive amount of data."
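As a rough usage sketch of the new setting (assuming a DaoConfig setter that corresponds to the getMaximumIncludesToLoadPerPage() accessor used elsewhere in this commit; the setter name is an assumption, not confirmed by this diff):

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class IncludeLimitConfigExample {
    public static DaoConfig withIncludeLimit() {
        DaoConfig daoConfig = new DaoConfig();
        // Cap the number of _include/_revinclude resources loaded onto a single search page.
        // Assumed setter, mirroring getMaximumIncludesToLoadPerPage() referenced in this commit.
        daoConfig.setMaximumIncludesToLoadPerPage(1000);
        return daoConfig;
    }
}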

View File

@ -0,0 +1,5 @@
---
type: add
issue: 2836
jira: SMILE-2197
title: "FHIR bundle batch is now processed in parallel by default and is configurable by DaoConfig."

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 2902
jira: SMILE-3000
backport: cust_fmc_5_3
title: "Fixed a bug wherein includes were not being included on revincludes."

View File

@ -258,6 +258,17 @@ public class DaoConfig {
private boolean myAccountForDateIndexNulls;
private boolean myTriggerSubscriptionsForNonVersioningChanges;
/**
* @since 5.6.0
*/
// Thread pool sizes used for parallel bundle batch processing; set both to 1 for single-threaded processing
public static final int DEFAULT_BUNDLE_BATCH_POOL_SIZE = 20;
public static final int DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE = 100;
public static final int DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY = 200;
private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;
/**
* Constructor
*/
@ -2570,6 +2581,44 @@ public class DaoConfig {
myTriggerSubscriptionsForNonVersioningChanges = theTriggerSubscriptionsForNonVersioningChanges;
}
/**
* Get the batch transaction thread pool size.
*
* @since 5.6.0
*/
public Integer getBundleBatchPoolSize() {
return myBundleBatchPoolSize;
}
/**
* Set the batch transaction thread pool size. The default is {@link #DEFAULT_BUNDLE_BATCH_POOL_SIZE}.
* Set the pool size to 1 for single-threaded processing.
*
* @since 5.6.0
*/
public void setBundleBatchPoolSize(Integer theBundleBatchPoolSize) {
this.myBundleBatchPoolSize = theBundleBatchPoolSize;
}
/**
* Get the batch transaction thread maximum pool size.
*
* @since 5.6.0
*/
public Integer getBundleBatchMaxPoolSize() {
return myBundleBatchMaxPoolSize;
}
/**
* Set the batch transaction thread maximum pool size. The default is {@link #DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE}.
* Set the maximum pool size to 1 for single-threaded processing.
*
* @since 5.6.0
*/
public void setBundleBatchMaxPoolSize(Integer theBundleBatchMaxPoolSize) {
this.myBundleBatchMaxPoolSize = theBundleBatchMaxPoolSize;
}
public boolean canDeleteExpunge() {
return isAllowMultipleDelete() && isExpungeEnabled() && isDeleteExpungeEnabled();
}

View File

@ -886,7 +886,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
IRestfulServerDefaults server = theRequestDetails.getServer();
IPagingProvider pagingProvider = server.getPagingProvider();
return pagingProvider instanceof DatabaseBackedPagingProvider;
return pagingProvider != null;
}
protected void markResourcesMatchingExpressionAsNeedingReindexing(Boolean theCurrentlyReindexing, String theExpression) {

View File

@ -72,6 +72,7 @@ import ca.uhn.fhir.util.ElementUtil;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.AsyncUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
@ -92,6 +93,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionCallback;
@ -112,6 +114,10 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import static ca.uhn.fhir.util.StringUtil.toUtf8String;
@ -146,6 +152,8 @@ public abstract class BaseTransactionProcessor {
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;
private ThreadPoolTaskExecutor myExecutor;
@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
myDaoConfig = theDaoConfig;
@ -163,6 +171,16 @@ public abstract class BaseTransactionProcessor {
@PostConstruct
public void start() {
ourLog.trace("Starting transaction processor");
// Create the thread pool used to process bundle batch entries in parallel.
// Setting both pool sizes to 1 in the DaoConfig yields single-threaded processing.
myExecutor = new ThreadPoolTaskExecutor();
myExecutor.setThreadNamePrefix("bundle_batch_");
myExecutor.setCorePoolSize(myDaoConfig.getBundleBatchPoolSize());
myExecutor.setMaxPoolSize(myDaoConfig.getBundleBatchMaxPoolSize());
myExecutor.setQueueCapacity(DaoConfig.DEFAULT_BUNDLE_BATCH_QUEUE_CAPACITY);
myExecutor.initialize();
}
public <BUNDLE extends IBaseBundle> BUNDLE transaction(RequestDetails theRequestDetails, BUNDLE theRequest, boolean theNestedMode) {
@ -309,59 +327,54 @@ public abstract class BaseTransactionProcessor {
private IBaseBundle batch(final RequestDetails theRequestDetails, IBaseBundle theRequest, boolean theNestedMode) {
ourLog.info("Beginning batch with {} resources", myVersionAdapter.getEntries(theRequest).size());
long start = System.currentTimeMillis();
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
IBaseBundle resp = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode());
IBaseBundle response = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.BATCHRESPONSE.toCode());
Map<Integer, Object> responseMap = new ConcurrentHashMap<>();
List<IBase> requestEntries = myVersionAdapter.getEntries(theRequest);
int requestEntriesSize = requestEntries.size();
/*
* For batch, we handle each entry as a mini-transaction in its own database transaction so that if one fails, it doesn't prevent others
*/
for (final Object nextRequestEntry : myVersionAdapter.getEntries(theRequest)) {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
try {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) nextRequestEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest(theRequestDetails, subRequestBundle, "Batch sub-request", theNestedMode);
IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myVersionAdapter.addEntry(resp, subResponseEntry);
/*
* If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
*/
if (myVersionAdapter.getResource(subResponseEntry) == null) {
IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myVersionAdapter.setResource(subResponseEntry, myVersionAdapter.getResource(nextResponseBundleFirstEntry));
}
} catch (BaseServerResponseException e) {
caughtEx.setException(e);
} catch (Throwable t) {
ourLog.error("Failure during BATCH sub transaction processing", t);
caughtEx.setException(new InternalErrorException(t));
}
if (caughtEx.getException() != null) {
IBase nextEntry = myVersionAdapter.addEntry(resp);
populateEntryWithOperationOutcome(caughtEx.getException(), nextEntry);
myVersionAdapter.setResponseStatus(nextEntry, toStatusString(caughtEx.getException().getStatusCode()));
}
// Execute each entry in parallel as a mini-transaction in its own database
// transaction so that if one fails, it doesn't prevent the others.
// Each result is keyed by its original position so the response can be rebuilt in order.
CountDownLatch completionLatch = new CountDownLatch(requestEntriesSize);
for (int i = 0; i < requestEntriesSize; i++) {
IBase nextRequestEntry = requestEntries.get(i);
BundleTask bundleTask = new BundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
myExecutor.submit(bundleTask);
}
// Wait for all tasks to complete (or time out)
AsyncUtil.awaitLatchAndIgnoreInterrupt(completionLatch, 300L, TimeUnit.SECONDS);
// Now, create the bundle response in original order
for (int i = 0; i < requestEntriesSize; i++) {
Object nextResponseEntry = responseMap.get(i);
if (nextResponseEntry instanceof BaseServerResponseExceptionHolder) {
BaseServerResponseExceptionHolder caughtEx = (BaseServerResponseExceptionHolder) nextResponseEntry;
if (caughtEx.getException() != null) {
IBase nextEntry = myVersionAdapter.addEntry(response);
populateEntryWithOperationOutcome(caughtEx.getException(), nextEntry);
myVersionAdapter.setResponseStatus(nextEntry, toStatusString(caughtEx.getException().getStatusCode()));
}
} else {
myVersionAdapter.addEntry(response, (IBase) nextResponseEntry);
}
}
long delay = System.currentTimeMillis() - start;
ourLog.info("Batch completed in {}ms", delay);
return resp;
return response;
}
@VisibleForTesting
@ -1544,5 +1557,62 @@ public abstract class BaseTransactionProcessor {
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
}
public class BundleTask implements Callable<Void> {
private CountDownLatch myCompletedLatch;
private ServletRequestDetails myRequestDetails;
private IBase myNextReqEntry;
private Map<Integer, Object> myResponseMap;
private int myResponseOrder;
private boolean myNestedMode;
protected BundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
this.myCompletedLatch = theCompletedLatch;
this.myRequestDetails = (ServletRequestDetails)theRequestDetails;
this.myNextReqEntry = theNextReqEntry;
this.myResponseMap = theResponseMap;
this.myResponseOrder = theResponseOrder;
this.myNestedMode = theNestedMode;
}
@Override
public Void call() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
try {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) myNextReqEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode);
IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, subResponseEntry);
/*
* If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
*/
if (myVersionAdapter.getResource(subResponseEntry) == null) {
IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry);
}
} catch (BaseServerResponseException e) {
caughtEx.setException(e);
} catch (Throwable t) {
ourLog.error("Failure during BATCH sub transaction processing", t);
caughtEx.setException(new InternalErrorException(t));
}
if (caughtEx.getException() != null) {
// add exception to the response map
myResponseMap.put(myResponseOrder, caughtEx);
}
// Log completion of this entry (useful when verifying parallel processing)
ourLog.debug("Batch processing for {} is completed", myVersionAdapter.getEntryRequestUrl((IBase) myNextReqEntry));
myCompletedLatch.countDown();
return null;
}
}
}
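For readers unfamiliar with the pattern used above, here is a self-contained, simplified sketch of the dispatch-and-reassemble approach using plain java.util.concurrent rather than the HAPI types: submit one task per entry, key each result by its original index in a concurrent map, block on a CountDownLatch, then rebuild the response in order. This is an illustrative sketch, not the HAPI implementation.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ParallelBatchSketch {

    public static List<String> process(List<String> entries) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(20);
        Map<Integer, String> responseMap = new ConcurrentHashMap<>();
        CountDownLatch completionLatch = new CountDownLatch(entries.size());

        // Dispatch: each entry is handled independently so one failure cannot block the others
        for (int i = 0; i < entries.size(); i++) {
            final int index = i;
            executor.submit(() -> {
                try {
                    responseMap.put(index, handleEntry(entries.get(index)));
                } catch (Exception e) {
                    responseMap.put(index, "error: " + e.getMessage());
                } finally {
                    completionLatch.countDown();
                }
            });
        }

        // Wait (with a timeout) for every entry, then rebuild the response in the original order
        completionLatch.await(300, TimeUnit.SECONDS);
        executor.shutdown();
        List<String> responses = new ArrayList<>();
        for (int i = 0; i < entries.size(); i++) {
            responses.add(responseMap.get(i));
        }
        return responses;
    }

    private static String handleEntry(String entry) {
        // Stand-in for processTransactionAsSubRequest(...) in the real code
        return "processed " + entry;
    }
}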

View File

@ -152,7 +152,7 @@ public class IdHelperService {
Validate.notNull(theId, "theId must not be null");
ResourcePersistentId retVal;
if (myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId)) {
if (idRequiresForcedId(theId)) {
if (myDaoConfig.isDeleteEnabled()) {
retVal = new ResourcePersistentId(resolveResourceIdentity(theRequestPartitionId, theResourceType, theId).getResourceId());
} else {
@ -174,6 +174,17 @@ public class IdHelperService {
return retVal;
}
/**
* Returns true if the given resource ID should be stored in a forced ID. Under default config
* (meaning client ID strategy is {@link ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum#ALPHANUMERIC})
* this will return true if the ID has any non-digit characters.
*
* In {@link ca.uhn.fhir.jpa.api.config.DaoConfig.ClientIdStrategyEnum#ANY} mode it will always return true.
*/
public boolean idRequiresForcedId(String theId) {
return myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId);
}
@Nonnull
private String toForcedIdToPidKey(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
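A self-contained sketch of the check the new idRequiresForcedId helper performs (a simplified re-statement under the default ALPHANUMERIC strategy, not the HAPI implementation; the real isValidPid lives elsewhere in IdHelperService):

public class ForcedIdCheckSketch {

    enum ClientIdStrategy { ALPHANUMERIC, ANY }

    // Under ANY every client-assigned ID needs a forced ID; under the default ALPHANUMERIC
    // strategy only IDs that are not purely numeric (i.e. not a valid native PID) do.
    static boolean idRequiresForcedId(ClientIdStrategy strategy, String id) {
        return strategy == ClientIdStrategy.ANY || !isValidPid(id);
    }

    static boolean isValidPid(String id) {
        try {
            Long.parseLong(id);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(idRequiresForcedId(ClientIdStrategy.ALPHANUMERIC, "123"));       // false
        System.out.println(idRequiresForcedId(ClientIdStrategy.ALPHANUMERIC, "B51936689")); // true
        System.out.println(idRequiresForcedId(ClientIdStrategy.ANY, "123"));                // true
    }
}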

View File

@ -41,6 +41,7 @@ import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
import ca.uhn.fhir.jpa.util.InterceptorUtil;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.InstantDt;
@ -74,6 +75,7 @@ import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
public class PersistedJpaBundleProvider implements IBundleProvider {
@ -409,19 +411,25 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
// Note: Leave as protected, HSPC depends on this
@SuppressWarnings("WeakerAccess")
protected List<IBaseResource> toResourceList(ISearchBuilder theSearchBuilder, List<ResourcePersistentId> thePids) {
Set<ResourcePersistentId> includedPids = new HashSet<>();
List<ResourcePersistentId> includedPidList = new ArrayList<>();
if (mySearchEntity.getSearchType() == SearchTypeEnum.SEARCH) {
Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes));
// Load _revincludes
Set<ResourcePersistentId> includedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toRevIncludesList(), true, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes);
if (maxIncludes != null) {
maxIncludes -= includedPids.size();
}
includedPids.addAll(theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes));
}
thePids.addAll(includedPids);
includedPidList.addAll(includedPids);
List<ResourcePersistentId> includedPidList = new ArrayList<>(includedPids);
thePids.addAll(includedPidList);
// Load _includes
Set<ResourcePersistentId> revIncludedPids = theSearchBuilder.loadIncludes(myContext, myEntityManager, thePids, mySearchEntity.toIncludesList(), false, mySearchEntity.getLastUpdated(), myUuid, myRequest, maxIncludes);
thePids.addAll(revIncludedPids);
includedPidList.addAll(revIncludedPids);
}
// Execute the query and make sure we return distinct results
List<IBaseResource> resources = new ArrayList<>();
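A minimal sketch of the budgeting pattern the hunk above applies (illustrative only; loadIncludes here is a stand-in for the ISearchBuilder.loadIncludes(...) calls, with reverseMode=true corresponding to _revinclude):

import java.util.HashSet;
import java.util.Set;

public class IncludeBudgetSketch {

    // _revinclude results are loaded first and charged against the configured per-page maximum;
    // whatever budget remains is spent on _include results, and the _include pass also covers
    // the resources pulled in via _revinclude (the bug fixed in this commit).
    static Set<Long> loadWithBudget(Set<Long> matchedPids, Integer maxIncludes) {
        Set<Long> revIncluded = loadIncludes(matchedPids, /* reverseMode= */ true, maxIncludes);
        if (maxIncludes != null) {
            maxIncludes -= revIncluded.size();
        }

        Set<Long> base = new HashSet<>(matchedPids);
        base.addAll(revIncluded);
        Set<Long> included = loadIncludes(base, /* reverseMode= */ false, maxIncludes);

        Set<Long> all = new HashSet<>(revIncluded);
        all.addAll(included);
        return all;
    }

    // Stand-in for ISearchBuilder.loadIncludes(...)
    static Set<Long> loadIncludes(Set<Long> pids, boolean reverseMode, Integer max) {
        return new HashSet<>();
    }
}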

View File

@ -536,19 +536,22 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* individually for pages as we return them to clients
*/
// _revincludes
Integer maxIncludes = myDaoConfig.getMaximumIncludesToLoadPerPage();
final Set<ResourcePersistentId> includedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getRevIncludes(), true, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes);
if (maxIncludes != null) {
maxIncludes -= includedPids.size();
}
if (theParams.getEverythingMode() == null && (maxIncludes == null || maxIncludes > 0)) {
includedPids.addAll(theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes));
}
pids.addAll(includedPids);
List<ResourcePersistentId> includedPidsList = new ArrayList<>(includedPids);
pids.addAll(includedPidsList);
// _includes
if (theParams.getEverythingMode() == null && (maxIncludes == null || maxIncludes > 0)) {
Set<ResourcePersistentId> revIncludedPids = theSb.loadIncludes(myContext, myEntityManager, pids, theParams.getIncludes(), false, theParams.getLastUpdated(), "(synchronous)", theRequestDetails, maxIncludes);
includedPids.addAll(revIncludedPids);
pids.addAll(revIncludedPids);
includedPidsList.addAll(revIncludedPids);
}
List<IBaseResource> resources = new ArrayList<>();
theSb.loadResourcesByPid(pids, includedPidsList, resources, false, theRequestDetails);

View File

@ -220,9 +220,14 @@ public class SearchBuilder implements ISearchBuilder {
}
SearchContainedModeEnum searchContainedMode = theParams.getSearchContainedMode();
// Handle _id last, since it can typically be tacked onto a different parameter
List<String> paramNames = myParams.keySet().stream().filter(t -> !t.equals(IAnyResource.SP_RES_ID)).collect(Collectors.toList());
if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
paramNames.add(IAnyResource.SP_RES_ID);
}
// Handle each parameter
List<String> paramNames = new ArrayList<>(myParams.keySet());
for (String nextParamName : paramNames) {
if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by Elasticsearch
@ -857,7 +862,7 @@ public class SearchBuilder implements ISearchBuilder {
resourceLink = (Long) ((Object[]) nextRow)[0];
version = (Long) ((Object[]) nextRow)[1];
} else {
resourceLink = (Long)nextRow;
resourceLink = (Long) nextRow;
}
pidsToInclude.add(new ResourcePersistentId(resourceLink, version));
@ -926,8 +931,8 @@ public class SearchBuilder implements ISearchBuilder {
if (resourceLink != null) {
ResourcePersistentId persistentId;
if (findVersionFieldName != null) {
persistentId = new ResourcePersistentId(((Object[])resourceLink)[0]);
persistentId.setVersion((Long) ((Object[])resourceLink)[1]);
persistentId = new ResourcePersistentId(((Object[]) resourceLink)[0]);
persistentId.setVersion((Long) ((Object[]) resourceLink)[1]);
} else {
persistentId = new ResourcePersistentId(resourceLink);
}
@ -977,7 +982,7 @@ public class SearchBuilder implements ISearchBuilder {
.add(IPreResourceAccessDetails.class, accessDetails)
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
for (int i = includedPidList.size() - 1; i >= 0; i--) {
if (accessDetails.isDontReturnResourceAtIndex(i)) {

View File

@ -82,8 +82,8 @@ public class BasePredicateBuilder {
return mySearchSqlBuilder.createConditionForValueWithComparator(theComparator, theColumn, theValue);
}
protected BaseJoiningPredicateBuilder getOrCreateQueryRootTable() {
return mySearchSqlBuilder.getOrCreateFirstPredicateBuilder();
protected BaseJoiningPredicateBuilder getOrCreateQueryRootTable(boolean theIncludeResourceTypeAndNonDeletedFlag) {
return mySearchSqlBuilder.getOrCreateFirstPredicateBuilder(theIncludeResourceTypeAndNonDeletedFlag);
}
public void addJoin(DbTable theFromTable, DbTable theToTable, DbColumn theFromColumn, DbColumn theToColumn) {

View File

@ -65,6 +65,7 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
Set<ResourcePersistentId> allOrPids = null;
SearchFilterParser.CompareOperation defaultOperation = SearchFilterParser.CompareOperation.eq;
boolean allIdsAreForcedIds = true;
for (List<? extends IQueryParameterType> nextValue : theValues) {
Set<ResourcePersistentId> orPids = new HashSet<>();
boolean haveValue = false;
@ -76,6 +77,9 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
IdType valueAsId = new IdType(value);
if (isNotBlank(value)) {
if (!myIdHelperService.idRequiresForcedId(valueAsId.getIdPart()) && allIdsAreForcedIds) {
allIdsAreForcedIds = false;
}
haveValue = true;
try {
ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(theRequestPartitionId, theResourceName, valueAsId.getIdPart());
@ -114,7 +118,7 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
List<Long> resourceIds = ResourcePersistentId.toLongList(allOrPids);
if (theSourceJoinColumn == null) {
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable();
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(!allIdsAreForcedIds);
Condition predicate;
switch (operation) {
default:

View File

@ -469,18 +469,31 @@ public class SearchQueryBuilder {
* If at least one predicate builder already exists, return the last one added to the chain. If none has been selected, create a builder on HFJ_RESOURCE, add it and return it.
*/
public BaseJoiningPredicateBuilder getOrCreateFirstPredicateBuilder() {
return getOrCreateFirstPredicateBuilder(true);
}
/**
* If at least one predicate builder already exists, return the last one added to the chain. If none has been selected, create a builder on HFJ_RESOURCE, add it and return it.
*/
public BaseJoiningPredicateBuilder getOrCreateFirstPredicateBuilder(boolean theIncludeResourceTypeAndNonDeletedFlag) {
if (myFirstPredicateBuilder == null) {
getOrCreateResourceTablePredicateBuilder();
getOrCreateResourceTablePredicateBuilder(theIncludeResourceTypeAndNonDeletedFlag);
}
return myFirstPredicateBuilder;
}
public ResourceTablePredicateBuilder getOrCreateResourceTablePredicateBuilder() {
return getOrCreateResourceTablePredicateBuilder(true);
}
public ResourceTablePredicateBuilder getOrCreateResourceTablePredicateBuilder(boolean theIncludeResourceTypeAndNonDeletedFlag) {
if (myResourceTableRoot == null) {
ResourceTablePredicateBuilder resourceTable = mySqlBuilderFactory.resourceTable(this);
addTable(resourceTable, null);
Condition typeAndDeletionPredicate = resourceTable.createResourceTypeAndNonDeletedPredicates();
addPredicate(typeAndDeletionPredicate);
if (theIncludeResourceTypeAndNonDeletedFlag) {
Condition typeAndDeletionPredicate = resourceTable.createResourceTypeAndNonDeletedPredicates();
addPredicate(typeAndDeletionPredicate);
}
myResourceTableRoot = resourceTable;
}
return myResourceTableRoot;

View File

@ -103,6 +103,7 @@ import org.hl7.fhir.r4.model.AllergyIntolerance;
import org.hl7.fhir.r4.model.Appointment;
import org.hl7.fhir.r4.model.AuditEvent;
import org.hl7.fhir.r4.model.Binary;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CapabilityStatement;
import org.hl7.fhir.r4.model.CarePlan;
@ -371,6 +372,9 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@Qualifier("myBinaryDaoR4")
protected IFhirResourceDao<Binary> myBinaryDao;
@Autowired
@Qualifier("myBodyStructureDaoR4")
protected IFhirResourceDao<BodyStructure> myBodyStructureDao;
@Autowired
@Qualifier("myDocumentReferenceDaoR4")
protected IFhirResourceDao<DocumentReference> myDocumentReferenceDao;
@Autowired

View File

@ -155,12 +155,12 @@ public class FhirResourceDaoR4ConcurrentWriteTest extends BaseJpaR4Test {
}
logAllResourceLinks();
runInTransaction(() -> {
Map<String, Integer> counts = getResourceCountMap();
assertEquals(1, counts.get("Patient"), counts.toString());
assertEquals(1, counts.get("Observation"), counts.toString());
logAllResourceLinks();
assertEquals(6, myResourceLinkDao.count());
assertEquals(6, myResourceTableDao.count());
assertEquals(14, myResourceHistoryTableDao.count());
@ -194,12 +194,12 @@ public class FhirResourceDaoR4ConcurrentWriteTest extends BaseJpaR4Test {
}
logAllResourceLinks();
runInTransaction(() -> {
Map<String, Integer> counts = getResourceCountMap();
assertEquals(1, counts.get("Patient"), counts.toString());
assertEquals(1, counts.get("Observation"), counts.toString());
logAllResourceLinks();
assertEquals(6, myResourceLinkDao.count());
assertEquals(6, myResourceTableDao.count());
assertEquals(14, myResourceHistoryTableDao.count());

View File

@ -929,7 +929,7 @@ public class FhirResourceDaoR4LegacySearchBuilderTest extends BaseJpaR4Test {
List<IIdType> actual = toUnqualifiedVersionlessIds(resp);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(actual, containsInAnyOrder(orgId, medId, patId, moId, patId2));
assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(1, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
// Specific patient ID with linked stuff
request = mock(HttpServletRequest.class);

View File

@ -1,17 +1,25 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hamcrest.collection.IsIterableContainingInAnyOrder;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CarePlan;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.EpisodeOfCare;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Procedure;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.SearchParameter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
@ -23,7 +31,10 @@ import java.util.stream.IntStream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hl7.fhir.r4.model.ResourceType.BodyStructure;
import static org.hl7.fhir.r4.model.ResourceType.Patient;
import static org.hl7.fhir.r4.model.ResourceType.Procedure;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@SuppressWarnings({"unchecked", "Duplicates"})
@ -114,6 +125,61 @@ public class FhirResourceDaoR4SearchIncludeTest extends BaseJpaR4Test {
}
}
@Test
public void testRevIncludeOnIncludedResource() {
SearchParameter sp = new SearchParameter();
sp.addBase("Procedure");
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
sp.setCode("focalAccess");
sp.setType(Enumerations.SearchParamType.REFERENCE);
sp.setExpression("Procedure.extension('http://fkcfhir.org/fhir/cs/CS1MachNumber')");
sp.addTarget("BodyStructure");
mySearchParameterDao.create(sp, mySrd);
mySearchParamRegistry.forceRefresh();
BodyStructure bs = new BodyStructure();
bs.setId("B51936689");
bs.setDescription("Foo");
myBodyStructureDao.update(bs, mySrd);
Procedure p = new Procedure();
p.setId("PRA8780542726");
p.setStatus(org.hl7.fhir.r4.model.Procedure.ProcedureStatus.COMPLETED);
myProcedureDao.update(p, mySrd);
p = new Procedure();
p.setId("PRA8780542785");
p.addPartOf().setReference("Procedure/PRA8780542726");
p.setStatus(org.hl7.fhir.r4.model.Procedure.ProcedureStatus.COMPLETED);
p.addExtension("http://fkcfhir.org/fhir/cs/CS1MachNumber", new Reference("BodyStructure/B51936689"));
myProcedureDao.update(p, mySrd);
logAllResources();
logAllResourceLinks();
// Non synchronous
SearchParameterMap map = new SearchParameterMap();
map.add("_id", new TokenParam("PRA8780542726"));
map.addRevInclude(new Include("Procedure:part-of"));
map.addInclude(new Include("Procedure:focalAccess").asRecursive());
IBundleProvider outcome = myProcedureDao.search(map, mySrd);
assertEquals(PersistedJpaSearchFirstPageBundleProvider.class, outcome.getClass());
List<String> ids = toUnqualifiedVersionlessIdValues(outcome);
assertThat(ids.toString(), ids, Matchers.containsInAnyOrder("Procedure/PRA8780542726", "Procedure/PRA8780542785", "BodyStructure/B51936689"));
// Synchronous
map = new SearchParameterMap().setLoadSynchronous(true);
map.add("_id", new TokenParam("PRA8780542726"));
map.addRevInclude(new Include("Procedure:part-of"));
map.addInclude(new Include("Procedure:focalAccess").asRecursive());
outcome = myProcedureDao.search(map, mySrd);
assertEquals(SimpleBundleProvider.class, outcome.getClass());
ids = toUnqualifiedVersionlessIdValues(outcome);
assertThat(ids.toString(), ids, Matchers.containsInAnyOrder("Procedure/PRA8780542726", "Procedure/PRA8780542785", "BodyStructure/B51936689"));
}
@Test
public void testRevIncludesPaged_AsyncSearch() {
int eocCount = 10;

View File

@ -33,6 +33,7 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
// Set up search parameters that will return 75 Observations.
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());

View File

@ -778,7 +778,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
List<IIdType> actual = toUnqualifiedVersionlessIds(resp);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(actual, containsInAnyOrder(orgId, medId, patId, moId, patId2));
assertEquals(5, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(1, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
// Specific patient ID with linked stuff
request = mock(HttpServletRequest.class);
@ -1475,21 +1475,28 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
id2 = myOrganizationDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
}
// FIXME: restore
int size;
SearchParameterMap params = new SearchParameterMap();
// params.setLoadSynchronous(true);
// assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
//
// params = new SearchParameterMap();
// params.add("_id", new StringParam(id1));
// assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
//
// params = new SearchParameterMap();
// params.add("_id", new StringParam("9999999999999999"));
// assertEquals(0, toList(myPatientDao.search(params)).size());
myCaptureQueriesListener.clear();
params = new SearchParameterMap();
params.setLoadSynchronous(true);
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
params = new SearchParameterMap();
params.add("_id", new StringParam(id1));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(params)), contains(id1));
params = new SearchParameterMap();
params.add("_id", new StringParam("9999999999999999"));
assertEquals(0, toList(myPatientDao.search(params)).size());
params = new SearchParameterMap();
params.add("_id", new StringParam(id2));
assertEquals(0, toList(myPatientDao.search(params)).size());
size = toList(myPatientDao.search(params)).size();
myCaptureQueriesListener.logAllQueries();
assertEquals(0, size);
}
@ -1596,8 +1603,8 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertEquals(1, countMatches(sqlQuery, "res_id = '123'"), sqlQuery);
assertEquals(1, countMatches(sqlQuery, "join"), sqlQuery);
assertEquals(1, countMatches(sqlQuery, "hash_sys_and_value"), sqlQuery);
assertEquals(1, countMatches(sqlQuery, "res_type = 'diagnosticreport"), sqlQuery); // could be 0
assertEquals(1, countMatches(sqlQuery, "res_deleted_at"), sqlQuery); // could be 0
assertEquals(0, countMatches(sqlQuery, "res_type = 'diagnosticreport"), sqlQuery); // could be 0
assertEquals(0, countMatches(sqlQuery, "res_deleted_at"), sqlQuery); // could be 0
}
}

View File

@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -18,6 +19,7 @@ import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.apache.commons.lang3.StringUtils;
@ -26,6 +28,8 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
@ -797,6 +801,112 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(1, StringUtils.countMatches(selectQuery, "SELECT"));
}
/**
* Make sure that if we're performing a query where the resource type is implicitly known,
* we don't include a selector for the resource type
*
* This test is for queries with _id where the ID is a forced ID
*/
@Test
public void testSearchOnIdAndReference_SearchById() {
Patient p = new Patient();
p.setId("B");
myPatientDao.update(p);
Observation obs = new Observation();
obs.setId("A");
obs.setSubject(new Reference("Patient/B"));
obs.setStatus(Observation.ObservationStatus.FINAL);
myObservationDao.update(obs);
Observation obs2 = new Observation();
obs2.setSubject(new Reference("Patient/B"));
obs2.setStatus(Observation.ObservationStatus.FINAL);
String obs2id = myObservationDao.create(obs2).getId().getIdPart();
assertThat(obs2id, matchesPattern("^[0-9]+$"));
// Search by ID where all IDs are forced IDs
{
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenParam("A"));
map.add("subject", new ReferenceParam("Patient/B"));
map.add("status", new TokenParam("final"));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertEquals(1, outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "forcedid0_.resource_type='observation'"), selectQuery);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "forcedid0_.forced_id in ('a')"), selectQuery);
selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "select t1.res_id from hfj_resource t1"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_type = 'observation'"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_deleted_at is null"), selectQuery);
}
// Search by ID where at least one ID is a numeric ID
{
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenOrListParam(null, "A", obs2id));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertEquals(2, outcome.size());
assertEquals(2, outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0"), selectQuery);
// Because we included a non-forced ID, we need to verify the type
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'"), selectQuery);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null"), selectQuery);
}
// Delete the resource - The searches should generate similar SQL now, but
// not actually return the result
myObservationDao.delete(new IdType("Observation/A"));
myObservationDao.delete(new IdType("Observation/" + obs2id));
// Search by ID where all IDs are forced IDs
{
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenParam("A"));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertEquals(0, outcome.size());
assertEquals(0, outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "forcedid0_.resource_type='observation'"), selectQuery);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "forcedid0_.forced_id in ('a')"), selectQuery);
selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'"), selectQuery);
assertEquals(0, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null"), selectQuery);
}
// Search by ID where at least one ID is a numeric ID
{
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenOrListParam(null, "A", obs2id));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertEquals(0, outcome.size());
assertEquals(0, outcome.getResources(0, 999).size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0"), selectQuery);
// Because we included a non-forced ID, we need to verify the type
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'"), selectQuery);
assertEquals(1, StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null"), selectQuery);
}
}
@AfterEach
public void afterResetDao() {
myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());

View File

@ -343,8 +343,8 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
.stream()
.collect(MultimapCollector.toMultimap(t -> new IdType(t.getResponse().getLocation()).toUnqualifiedVersionless().getResourceType(), t -> new IdType(t.getResponse().getLocation()).toUnqualifiedVersionless().getValue()));
logAllResources();
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
logAllResources();
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
});
@ -378,8 +378,8 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
String patientId = resourceIds.get("Patient").get(0);
logAllResources();
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
logAllResources();
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
});
@ -426,8 +426,8 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
String patientId = resourceIds.get("Patient").get(0);
logAllResources();
Multimap<String, Integer> resourcesByType = runInTransaction(() -> {
logAllResources();
return myResourceTableDao.findAll().stream().collect(MultimapCollector.toMultimap(t -> t.getResourceType(), t -> t.getPartitionId().getPartitionId()));
});

View File

@ -1,22 +1,50 @@
package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import java.util.ArrayList;
import java.util.List;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.Bundle.BundleType;
import org.hl7.fhir.r4.model.Bundle.HTTPVerb;
import org.hl7.fhir.r4.model.Condition;
import org.hl7.fhir.r4.model.Enumerations.AdministrativeGender;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.NotImplementedOperationException;
public class ResourceProviderR4BundleTest extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4BundleTest.class);
@BeforeEach
@Override
public void before() throws Exception {
super.before();
myDaoConfig.setBundleBatchPoolSize(20);
myDaoConfig.setBundleBatchMaxPoolSize(100);
}
@AfterEach
@Override
public void after() throws Exception {
super.after();
myDaoConfig.setBundleBatchPoolSize(DaoConfig.DEFAULT_BUNDLE_BATCH_POOL_SIZE);
myDaoConfig.setBundleBatchMaxPoolSize(DaoConfig.DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE);
}
/**
* See #401
*/
@ -58,5 +86,181 @@ public class ResourceProviderR4BundleTest extends BaseResourceProviderR4Test {
}
@Test
public void testBundleBatch() {
List<String> ids = createPatients(50);
Bundle input = new Bundle();
input.setType(BundleType.BATCH);
for (String id : ids)
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(id);
Bundle output = myClient.transaction().withBundle(input).execute();
//ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(50, output.getEntry().size());
List<BundleEntryComponent> bundleEntries = output.getEntry();
int i=0;
for (BundleEntryComponent bundleEntry : bundleEntries) {
assertEquals(ids.get(i++), bundleEntry.getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
}
}
@Test
public void testBundleBatchWithSingleThread() {
List<String> ids = createPatients(50);
myDaoConfig.setBundleBatchPoolSize(1);
myDaoConfig.setBundleBatchMaxPoolSize(1);
Bundle input = new Bundle();
input.setType(BundleType.BATCH);
for (String id : ids)
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(id);
Bundle output = myClient.transaction().withBundle(input).execute();
//ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(50, output.getEntry().size());
List<BundleEntryComponent> bundleEntries = output.getEntry();
int i=0;
for (BundleEntryComponent bundleEntry : bundleEntries) {
assertEquals(ids.get(i++), bundleEntry.getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
}
}
@Test
public void testBundleBatchWithError() {
List<String> ids = createPatients(5);
Bundle input = new Bundle();
input.setType(BundleType.BATCH);
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(0));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient/1000"); // not exist
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(1));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(2));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient/2000"); // not exist
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(3));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl("Patient/3000"); // not exist
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(4));
Bundle output = myClient.transaction().withBundle(input).execute();
//ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(8, output.getEntry().size());
List<BundleEntryComponent> bundleEntries = output.getEntry();
// patient 1
assertEquals(ids.get(0), bundleEntries.get(0).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// Patient/1000 - error outcome
assertThat(((OperationOutcome)bundleEntries.get(1).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics(), containsString("Patient/1000"));
// patient 2
assertEquals(ids.get(1), bundleEntries.get(2).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// patient 3
assertEquals(ids.get(2), bundleEntries.get(3).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// Patient/2000 - error outcome
assertThat(((OperationOutcome)bundleEntries.get(4).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics(), containsString("Patient/2000"));
// patient 4
assertEquals(ids.get(3), bundleEntries.get(5).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// Patient/3000 - error outcome
assertThat(((OperationOutcome)bundleEntries.get(6).getResponse().getOutcome()).getIssueFirstRep().getDiagnostics(), containsString("Patient/3000"));
// patient 5
assertEquals(ids.get(4), bundleEntries.get(7).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
}
@Test
public void testBundleBatchWithCreate() {
List<String> ids = createPatients(5);
Bundle input = new Bundle();
input.setType(BundleType.BATCH);
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(0));
Patient p = new Patient();
p.setId("100");
p.setGender(AdministrativeGender.MALE);
p.addIdentifier().setSystem("urn:foo").setValue("A");
p.addName().setFamily("Smith");
input.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST);
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(1));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(2));
Condition c = new Condition();
c.getSubject().setReference(ids.get(0));
input.addEntry().setResource(c).getRequest().setMethod(HTTPVerb.POST);
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(3));
input.addEntry().getRequest().setMethod(HTTPVerb.GET).setUrl(ids.get(4));
//ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
Bundle output = myClient.transaction().withBundle(input).execute();
//ourLog.info("Bundle: \n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));
assertEquals(7, output.getEntry().size());
List<BundleEntryComponent> bundleEntries = output.getEntry();
// patient 1
assertEquals(ids.get(0), bundleEntries.get(0).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// patient create
assertThat(bundleEntries.get(1).getResponse().getStatus(), containsString("201"));
// patient 2
assertEquals(ids.get(1), bundleEntries.get(2).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// patient 3
assertEquals(ids.get(2), bundleEntries.get(3).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// condition create
assertThat(bundleEntries.get(4).getResponse().getStatus(), containsString("201"));
// patient 4
assertEquals(ids.get(3), bundleEntries.get(5).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
// patient 5
assertEquals(ids.get(4), bundleEntries.get(6).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
}
private List<String> createPatients(int count) {
List<String> ids = new ArrayList<>();
for (int i = 0; i < count; i++) {
Patient patient = new Patient();
patient.setGender(AdministrativeGender.MALE);
patient.addIdentifier().setSystem("urn:foo").setValue("A");
patient.addName().setFamily("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".substring(i, i+1));
String id = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getValue();
ids.add(id);
}
return ids;
}
}

View File

@ -41,7 +41,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test {
private static final Logger ourLog = LoggerFactory.getLogger(EmailSubscriptionDstu2Test.class);
@RegisterExtension
static GreenMailExtension ourGreenMail = new GreenMailExtension(ServerSetupTest.SMTP);
static GreenMailExtension ourGreenMail = new GreenMailExtension(ServerSetupTest.SMTP.withPort(0));
private List<IIdType> mySubscriptionIds = new ArrayList<>();
@ -148,7 +148,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test {
private MailConfig withMailConfig() {
return new MailConfig()
.setSmtpHostname(ServerSetupTest.SMTP.getBindAddress())
.setSmtpPort(ServerSetupTest.SMTP.getPort());
.setSmtpPort(ourGreenMail.getSmtp().getPort());
}
}

View File

@ -42,7 +42,7 @@ public class EmailSubscriptionDstu3Test extends BaseResourceProviderDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(EmailSubscriptionDstu3Test.class);
@RegisterExtension
static GreenMailExtension ourGreenMail = new GreenMailExtension(ServerSetupTest.SMTP);
static GreenMailExtension ourGreenMail = new GreenMailExtension(ServerSetupTest.SMTP.withPort(0));
@Autowired
private SubscriptionTestUtil mySubscriptionTestUtil;
@ -261,7 +261,7 @@ public class EmailSubscriptionDstu3Test extends BaseResourceProviderDstu3Test {
private MailConfig withMailConfig() {
return new MailConfig()
.setSmtpHostname(ServerSetupTest.SMTP.getBindAddress())
.setSmtpPort(ServerSetupTest.SMTP.getPort());
.setSmtpPort(ourGreenMail.getSmtp().getPort());
}