Transaction SQL Optimization (#4679)

* Start work optimizing transaction
* Tons of test cleanup
* Cleanup
* More optimization
* Optimize
* Many tests fixed
* Work on test fixes
* Optimization done, now doing cleanup
* Cleanup
* Add docs
* Test fixes
* Test fix
* License headers
* Test fix
* Test cleanup
* Test fix

This commit is contained in:
parent 90895aab57
commit bcc1ca7593
@@ -35,6 +35,7 @@ import org.apache.http.message.BasicNameValuePair;
 import org.hl7.fhir.instance.model.api.IPrimitiveType;

 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
 import java.net.URI;
@@ -54,7 +55,9 @@ import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
 import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.commons.lang3.StringUtils.endsWith;
 import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;

+@SuppressWarnings("JavadocLinkAsPlainText")
 public class UrlUtil {
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UrlUtil.class);

@@ -134,6 +137,72 @@ public class UrlUtil {
       return theExtensionUrl;
    }

+   /**
+    * Given a FHIR resource URL, extracts the associated resource type. Supported formats
+    * include the following inputs, all of which will return {@literal Patient}. If no
+    * resource type can be determined, {@literal null} will be returned.
+    * <ul>
+    * <li>Patient
+    * <li>Patient?
+    * <li>Patient?identifier=foo
+    * <li>/Patient
+    * <li>/Patient?
+    * <li>/Patient?identifier=foo
+    * <li>http://foo/base/Patient?identifier=foo
+    * <li>http://foo/base/Patient/1
+    * <li>http://foo/base/Patient/1/_history/2
+    * <li>Patient/1
+    * <li>Patient/1/_history/2
+    * <li>/Patient/1
+    * <li>/Patient/1/_history/2
+    * </ul>
+    */
+   @Nullable
+   public static String determineResourceTypeInResourceUrl(FhirContext theFhirContext, String theUrl) {
+      if (theUrl == null) {
+         return null;
+      }
+      if (theUrl.startsWith("urn:")) {
+         return null;
+      }
+
+      String resourceType = null;
+      int qmIndex = theUrl.indexOf("?");
+      if (qmIndex > 0) {
+         String urlResourceType = theUrl.substring(0, qmIndex);
+         int slashIdx = urlResourceType.lastIndexOf('/');
+         if (slashIdx != -1) {
+            urlResourceType = urlResourceType.substring(slashIdx + 1);
+         }
+         if (isNotBlank(urlResourceType)) {
+            resourceType = urlResourceType;
+         }
+      } else {
+         resourceType = theUrl;
+         int slashIdx = resourceType.indexOf('/');
+         if (slashIdx == 0) {
+            resourceType = resourceType.substring(1);
+         }
+
+         slashIdx = resourceType.indexOf('/');
+         if (slashIdx != -1) {
+            resourceType = new IdDt(resourceType).getResourceType();
+         }
+
+      }
+
+      try {
+         if (isNotBlank(resourceType)) {
+            theFhirContext.getResourceDefinition(resourceType);
+         }
+      } catch (DataFormatException e) {
+         return null;
+      }
+
+      return resourceType;
+   }
+
+
    /**
    * URL encode a value according to RFC 3986
    * <p>
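As an aside (not part of this commit), the new helper is easy to exercise directly. The following minimal sketch simply feeds it the URL shapes listed in the javadoc above, assuming an R4 FhirContext is available:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.UrlUtil;

public class DetermineResourceTypeExample {
   public static void main(String[] args) {
      FhirContext ctx = FhirContext.forR4Cached();

      // Each of these prints "Patient", per the supported formats documented above
      System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "Patient?identifier=foo"));
      System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "/Patient/1/_history/2"));
      System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "http://foo/base/Patient?identifier=foo"));

      // urn: placeholders and unknown resource types print "null"
      System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "urn:uuid:0000"));
      System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "Foo?identifier=bar"));
   }
}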
@@ -61,6 +61,7 @@ ca.uhn.fhir.rest.server.method.ResourceParameter.noContentTypeInRequest=No Conte
 ca.uhn.fhir.rest.server.method.ResourceParameter.failedToParseRequest=Failed to parse request body as {0} resource. Error was: {1}

 ca.uhn.fhir.parser.ParserState.wrongResourceTypeFound=Incorrect resource type found, expected "{0}" but found "{1}"
+ca.uhn.fhir.rest.api.server.storage.TransactionDetails.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search
 ca.uhn.fhir.rest.server.RestfulServer.getPagesNonHttpGet=Requests for _getpages must use HTTP GET
 ca.uhn.fhir.rest.server.RestfulServer.unknownMethod=Invalid request: The FHIR endpoint on this server does not know how to handle {0} operation[{1}] with parameters [{2}]
 ca.uhn.fhir.rest.server.RestfulServer.rootRequest=This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name.
@@ -78,7 +79,6 @@ ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor.externalizedBinarySt
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
 ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
-ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlMultipleMatches=Invalid match URL "{0}" - Multiple resources match this search
 ca.uhn.fhir.jpa.dao.BaseStorageDao.inlineMatchNotSupported=Inline match URLs are not supported on this server. Cannot process reference: "{0}"
 ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithMultipleMatchFailure=Failed to {0} resource with match URL "{1}" because this search matched {2} resources
 ca.uhn.fhir.jpa.dao.BaseStorageDao.transactionOperationWithIdNotMatchFailure=Failed to {0} resource with match URL "{1}" because the matching resource does not match the provided ID
@@ -0,0 +1,9 @@
+---
+type: add
+issue: 4679
+title: "When executing FHIR transactions in the JPA server where the transaction contained large numbers
+  of inline match URLs, the transaction processor will now prefetch the match URL targets in a single
+  optimized batch. This avoids a potentially significant number of database round trips. In addition,
+  the SQL query used for pre-resolving resource update targets in the transaction processor has been
+  reworked to fully leverage available database indexes. This should result in significant performance
+  improvements for certain large FHIR transactions."
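To make the changelog entry concrete, here is a hedged sketch (not taken from the commit or its tests) of the kind of transaction Bundle whose conditional creates and inline match URL references exercise the new batched prefetch; it uses only standard R4 model classes, and the identifier system and loop count are illustrative:

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;

public class ConditionalTransactionExample {
   public static Bundle buildTransaction() {
      Bundle bundle = new Bundle();
      bundle.setType(Bundle.BundleType.TRANSACTION);

      for (int i = 0; i < 100; i++) {
         // Conditional create: the If-None-Exist match URL is one of the URLs the
         // transaction processor can now pre-resolve in a single batched query
         Patient patient = new Patient();
         patient.addIdentifier().setSystem("http://example.org/mrn").setValue("mrn-" + i);
         bundle.addEntry()
            .setFullUrl("urn:uuid:patient-" + i)
            .setResource(patient)
            .getRequest()
            .setMethod(Bundle.HTTPVerb.POST)
            .setUrl("Patient")
            .setIfNoneExist("Patient?identifier=http://example.org/mrn|mrn-" + i);

         // Inline match URL reference: also resolved by the batched prefetch
         Observation obs = new Observation();
         obs.setSubject(new Reference("Patient?identifier=http://example.org/mrn|mrn-" + i));
         bundle.addEntry()
            .setResource(obs)
            .getRequest()
            .setMethod(Bundle.HTTPVerb.POST)
            .setUrl("Observation");
      }
      return bundle;
   }
}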
@@ -377,6 +377,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
       validateResourceIdCreation(theResource, theRequest);
    }

+   if (theMatchUrl != null) {
+      // Note: We actually create the search URL below by calling enforceMatchUrlResourceUniqueness
+      // since we can't do that until we know the assigned PID, but we set this flag up here
+      // because we need to set it before we persist the ResourceTable entity in order to
+      // avoid triggering an extra DB update
+      entity.setSearchUrlPresent(true);
+   }
+
    // Perform actual DB update
    // this call will also update the metadata
    ResourceTable updatedEntity = updateEntity(theRequest, theResource, entity, null, thePerformIndexing, false, theTransactionDetails, false, thePerformIndexing);
@@ -416,7 +424,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
    theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
    theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);

-   // Pre-cache the match URL
+   // Pre-cache the match URL, and create an entry in the HFJ_RES_SEARCH_URL table to
+   // protect against concurrent writes to the same conditional URL
    if (theMatchUrl != null) {
       myResourceSearchUrlSvc.enforceMatchUrlResourceUniqueness(getResourceName(), theMatchUrl, jpaPid);
       myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, getResourceName(), theMatchUrl, jpaPid);
@@ -1777,8 +1786,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

    // we stored a resource searchUrl at creation time to prevent resource duplication. Let's remove the entry on the
    // first update but guard against unnecessary trips to the database on subsequent ones.
-   if(theEntity.getVersion() < 2){
+   ResourceTable entity = (ResourceTable) theEntity;
+   if (entity.isSearchUrlPresent() && thePerformIndexing) {
       myResourceSearchUrlSvc.deleteByResId((Long) theEntity.getPersistentId().getId());
+      entity.setSearchUrlPresent(false);
    }

    return super.doUpdateForUpdateOrPatch(theRequest, theResourceId, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theResource, theEntity, theOperationType, theTransactionDetails);
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeResourceDefinition;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.IDaoRegistry;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
@@ -30,15 +31,20 @@ import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.util.QueryChunker;
 import ca.uhn.fhir.model.api.IQueryParameterType;
 import ca.uhn.fhir.rest.api.server.RequestDetails;
 import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
 import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.util.ResourceReferenceInfo;
 import ca.uhn.fhir.util.StopWatch;
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
 import org.apache.commons.lang3.Validate;
 import org.hibernate.internal.SessionImpl;
 import org.hl7.fhir.instance.model.api.IBase;
@@ -55,6 +61,7 @@ import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
 import javax.persistence.PersistenceContextType;
 import javax.persistence.PersistenceException;
+import javax.persistence.Tuple;
 import javax.persistence.TypedQuery;
 import javax.persistence.criteria.CriteriaBuilder;
 import javax.persistence.criteria.CriteriaQuery;
@@ -62,18 +69,16 @@ import javax.persistence.criteria.Predicate;
 import javax.persistence.criteria.Root;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;

-import static ca.uhn.fhir.jpa.dao.index.IdHelperService.EMPTY_PREDICATE_ARRAY;
+import static ca.uhn.fhir.util.UrlUtil.determineResourceTypeInResourceUrl;
-import static org.apache.commons.lang3.StringUtils.defaultString;
+import static org.apache.commons.lang3.StringUtils.countMatches;
 import static org.apache.commons.lang3.StringUtils.isNotBlank;

 public class TransactionProcessor extends BaseTransactionProcessor {
@@ -118,6 +123,13 @@ public class TransactionProcessor extends BaseTransactionProcessor {
       myFhirContext = theFhirContext;
    }

+   @Override
+   public void setStorageSettings(StorageSettings theStorageSettings) {
+      myStorageSettings = (JpaStorageSettings) theStorageSettings;
+      super.setStorageSettings(theStorageSettings);
+   }
+
+
    @Override
    protected EntriesToProcessMap doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
                                  IdSubstitutionMap theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {
@@ -128,203 +140,291 @@ public class TransactionProcessor extends BaseTransactionProcessor {
       requestPartitionId = RequestPartitionId.allPartitions();
    } else {
       // If all entries in the transaction point to the exact same partition, we'll try and do a pre-fetch
-      Set<RequestPartitionId> requestPartitionIdsForAllEntries = new HashSet<>();
-      for (IBase nextEntry : theEntries) {
-         IBaseResource resource = versionAdapter.getResource(nextEntry);
-         if (resource != null) {
-            RequestPartitionId requestPartition = myRequestPartitionSvc.determineCreatePartitionForRequest(theRequest, resource, myFhirContext.getResourceType(resource));
-            requestPartitionIdsForAllEntries.add(requestPartition);
-         }
-      }
-      if (requestPartitionIdsForAllEntries.size() == 1) {
-         requestPartitionId = requestPartitionIdsForAllEntries.iterator().next();
-      }
+      requestPartitionId = getSinglePartitionForAllEntriesOrNull(theRequest, theEntries, versionAdapter);
    }

    if (requestPartitionId != null) {
+      preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);
-      Set<String> foundIds = new HashSet<>();
-      List<Long> idsToPreFetch = new ArrayList<>();
-
-      /*
-       * Pre-Fetch any resources that are referred to normally by ID, e.g.
-       * regular FHIR updates within the transaction.
-       */
-      List<IIdType> idsToPreResolve = new ArrayList<>();
-      for (IBase nextEntry : theEntries) {
-         IBaseResource resource = versionAdapter.getResource(nextEntry);
-         if (resource != null) {
-            String fullUrl = versionAdapter.getFullUrl(nextEntry);
-            boolean isPlaceholder = defaultString(fullUrl).startsWith("urn:");
-            if (!isPlaceholder) {
-               if (resource.getIdElement().hasIdPart() && resource.getIdElement().hasResourceType()) {
-                  idsToPreResolve.add(resource.getIdElement());
-               }
-            }
-         }
-      }
-      List<JpaPid> outcome = myIdHelperService.resolveResourcePersistentIdsWithCache(requestPartitionId, idsToPreResolve)
-         .stream().collect(Collectors.toList());
-      for (JpaPid next : outcome) {
-         foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
-         theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
-         if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
-            idsToPreFetch.add(next.getId());
-         }
-      }
-      for (IIdType next : idsToPreResolve) {
-         if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) {
-            theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null);
-         }
-      }
-
-      /*
-       * Pre-resolve any conditional URLs we can
-       */
-      List<MatchUrlToResolve> searchParameterMapsToResolve = new ArrayList<>();
-      for (IBase nextEntry : theEntries) {
-         IBaseResource resource = versionAdapter.getResource(nextEntry);
-         if (resource != null) {
-            String verb = versionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);
-            String requestUrl = versionAdapter.getEntryRequestUrl(nextEntry);
-            String requestIfNoneExist = versionAdapter.getEntryIfNoneExist(nextEntry);
-            String resourceType = myFhirContext.getResourceType(resource);
-            if ("PUT".equals(verb) && requestUrl != null && requestUrl.contains("?")) {
-               JpaPid cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(resourceType, requestUrl);
-               if (cachedId != null) {
-                  idsToPreFetch.add(cachedId.getId());
-               } else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(requestUrl).matches()) {
-                  RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(resource);
-                  SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(requestUrl, resourceDefinition);
-                  searchParameterMapsToResolve.add(new MatchUrlToResolve(requestUrl, matchUrlSearchMap, resourceDefinition));
-               }
-            } else if ("POST".equals(verb) && requestIfNoneExist != null && requestIfNoneExist.contains("?")) {
-               JpaPid cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(resourceType, requestIfNoneExist);
-               if (cachedId != null) {
-                  idsToPreFetch.add(cachedId.getId());
-               } else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(requestIfNoneExist).matches()) {
-                  RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(resource);
-                  SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(requestIfNoneExist, resourceDefinition);
-                  searchParameterMapsToResolve.add(new MatchUrlToResolve(requestIfNoneExist, matchUrlSearchMap, resourceDefinition));
-               }
-            }
-
-         }
-      }
-      if (searchParameterMapsToResolve.size() > 0) {
-         CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
-         CriteriaQuery<ResourceIndexedSearchParamToken> cq = cb.createQuery(ResourceIndexedSearchParamToken.class);
-         Root<ResourceIndexedSearchParamToken> from = cq.from(ResourceIndexedSearchParamToken.class);
-         List<Predicate> orPredicates = new ArrayList<>();
-
-         for (MatchUrlToResolve next : searchParameterMapsToResolve) {
-            Collection<List<List<IQueryParameterType>>> values = next.myMatchUrlSearchMap.values();
-            if (values.size() == 1) {
-               List<List<IQueryParameterType>> andList = values.iterator().next();
-               IQueryParameterType param = andList.get(0).get(0);
-
-               if (param instanceof TokenParam) {
-                  Predicate hashPredicate = buildHashPredicateFromTokenParam((TokenParam)param, requestPartitionId, cb, from, next);
-
-                  if (hashPredicate != null) {
-                     if (myPartitionSettings.isPartitioningEnabled() && !myPartitionSettings.isIncludePartitionInSearchHashes()) {
-                        if (requestPartitionId.isDefaultPartition()) {
-                           Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
-                           hashPredicate = cb.and(hashPredicate, partitionIdCriteria);
-                        } else if (!requestPartitionId.isAllPartitions()) {
-                           Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(requestPartitionId.getPartitionIds());
-                           hashPredicate = cb.and(hashPredicate, partitionIdCriteria);
-                        }
-                     }
-
-                     orPredicates.add(hashPredicate);
-                  }
-               }
-            }
-
-         }
-
-         if (orPredicates.size() > 1) {
-            cq.where(cb.or(orPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
-
-            Map<Long, List<MatchUrlToResolve>> hashToSearchMap = buildHashToSearchMap(searchParameterMapsToResolve);
-
-            TypedQuery<ResourceIndexedSearchParamToken> query = myEntityManager.createQuery(cq);
-            List<ResourceIndexedSearchParamToken> results = query.getResultList();
-
-            for (ResourceIndexedSearchParamToken nextResult : results) {
-               Optional<List<MatchUrlToResolve>> matchedSearch = Optional.ofNullable(hashToSearchMap.get(nextResult.getHashSystemAndValue()));
-               if (!matchedSearch.isPresent()) {
-                  matchedSearch = Optional.ofNullable(hashToSearchMap.get(nextResult.getHashValue()));
-               }
-               matchedSearch.ifPresent(matchUrlsToResolve -> {
-                  matchUrlsToResolve.forEach(matchUrl -> {
-                     setSearchToResolvedAndPrefetchFoundResourcePid(theTransactionDetails, idsToPreFetch, nextResult, matchUrl);
-                  });
-               });
-            }
-            //For each SP Map which did not return a result, tag it as not found.
-            searchParameterMapsToResolve.stream()
-               // No matches
-               .filter(match -> !match.myResolved)
-               .forEach(match -> {
-                  ourLog.debug("Was unable to match url {} from database", match.myRequestUrl);
-                  theTransactionDetails.addResolvedMatchUrl(match.myRequestUrl, TransactionDetails.NOT_FOUND);
-               });
-         }
-      }
-
-      IFhirSystemDao<?,?> systemDao = myApplicationContext.getBean(IFhirSystemDao.class);
-      systemDao.preFetchResources(JpaPid.fromLongList(idsToPreFetch));
-
    }

    return super.doTransactionWriteOperations(theRequest, theActionName, theTransactionDetails, theAllIds, theIdSubstitutions, theIdToPersistedOutcome, theResponse, theOriginalRequestOrder, theEntries, theTransactionStopWatch);
 }

+private void preFetch(TransactionDetails theTransactionDetails, List<IBase> theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId) {
+   Set<String> foundIds = new HashSet<>();
+   List<Long> idsToPreFetch = new ArrayList<>();
+
+   /*
+    * Pre-Fetch any resources that are referred to normally by ID, e.g.
+    * regular FHIR updates within the transaction.
+    */
+   preFetchResourcesById(theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, foundIds, idsToPreFetch);
+
+   /*
+    * Pre-resolve any conditional URLs we can
+    */
+   preFetchConditionalUrls(theTransactionDetails, theEntries, theVersionAdapter, theRequestPartitionId, idsToPreFetch);
+
+   IFhirSystemDao<?, ?> systemDao = myApplicationContext.getBean(IFhirSystemDao.class);
+   systemDao.preFetchResources(JpaPid.fromLongList(idsToPreFetch));
+}
+
+private void preFetchResourcesById(TransactionDetails theTransactionDetails, List<IBase> theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId, Set<String> foundIds, List<Long> idsToPreFetch) {
+   List<IIdType> idsToPreResolve = new ArrayList<>();
+   for (IBase nextEntry : theEntries) {
+      IBaseResource resource = theVersionAdapter.getResource(nextEntry);
+      if (resource != null) {
+         String verb = theVersionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);
+         if ("PUT".equals(verb) || "PATCH".equals(verb)) {
+            String requestUrl = theVersionAdapter.getEntryRequestUrl(nextEntry);
+            if (countMatches(requestUrl, '/') == 1 && countMatches(requestUrl, '?') == 0) {
+               IIdType id = myFhirContext.getVersion().newIdType();
+               id.setValue(requestUrl);
+               idsToPreResolve.add(id);
+            }
+         }
+      }
+   }
+   List<JpaPid> outcome = myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve)
+      .stream().collect(Collectors.toList());
+   for (JpaPid next : outcome) {
+      foundIds.add(next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
+      theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
+      if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY || !next.getAssociatedResourceId().isIdPartValidLong()) {
+         idsToPreFetch.add(next.getId());
+      }
+   }
+   for (IIdType next : idsToPreResolve) {
+      if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) {
+         theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null);
+      }
+   }
+}
+
+private void preFetchConditionalUrls(TransactionDetails theTransactionDetails, List<IBase> theEntries, ITransactionProcessorVersionAdapter theVersionAdapter, RequestPartitionId theRequestPartitionId, List<Long> idsToPreFetch) {
+   List<MatchUrlToResolve> searchParameterMapsToResolve = new ArrayList<>();
+   for (IBase nextEntry : theEntries) {
+      IBaseResource resource = theVersionAdapter.getResource(nextEntry);
+      if (resource != null) {
+         String verb = theVersionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);
+         String requestUrl = theVersionAdapter.getEntryRequestUrl(nextEntry);
+         String requestIfNoneExist = theVersionAdapter.getEntryIfNoneExist(nextEntry);
+         String resourceType = determineResourceTypeInResourceUrl(myFhirContext, requestUrl);
+         if (resourceType == null && resource != null) {
+            resourceType = myFhirContext.getResourceType(resource);
+         }
+         if (("PUT".equals(verb) || "PATCH".equals(verb)) && requestUrl != null && requestUrl.contains("?")) {
+            preFetchConditionalUrl(resourceType, requestUrl, true, idsToPreFetch, searchParameterMapsToResolve);
+         } else if ("POST".equals(verb) && requestIfNoneExist != null && requestIfNoneExist.contains("?")) {
+            preFetchConditionalUrl(resourceType, requestIfNoneExist, false, idsToPreFetch, searchParameterMapsToResolve);
+         }
+
+         if (myStorageSettings.isAllowInlineMatchUrlReferences()) {
+            List<ResourceReferenceInfo> references = myFhirContext.newTerser().getAllResourceReferences(resource);
+            for (ResourceReferenceInfo next : references) {
+               String referenceUrl = next.getResourceReference().getReferenceElement().getValue();
+               String refResourceType = determineResourceTypeInResourceUrl(myFhirContext, referenceUrl);
+               if (refResourceType != null) {
+                  preFetchConditionalUrl(refResourceType, referenceUrl, false, idsToPreFetch, searchParameterMapsToResolve);
+               }
+            }
+         }
+      }
+   }
+
+   new QueryChunker<MatchUrlToResolve>()
+      .chunk(searchParameterMapsToResolve, 100, map ->
+         preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
+}
+
+/**
+ * @param theTransactionDetails The active transaction details
+ * @param theRequestPartitionId The active partition
+ * @param theInputParameters These are the search parameter maps that will actually be resolved
+ * @param theOutputPidsToLoadFully This list will be added to with any resource PIDs that need to be fully
+ *                                 pre-loaded (ie. fetch the actual resource body since we're presumably
+ *                                 going to update it and will need to see its current state eventually)
+ */
+private void preFetchSearchParameterMaps(TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId, List<MatchUrlToResolve> theInputParameters, List<Long> theOutputPidsToLoadFully) {
+   Set<Long> systemAndValueHashes = new HashSet<>();
+   Set<Long> valueHashes = new HashSet<>();
+   for (MatchUrlToResolve next : theInputParameters) {
+      Collection<List<List<IQueryParameterType>>> values = next.myMatchUrlSearchMap.values();
+      if (values.size() == 1) {
+         List<List<IQueryParameterType>> andList = values.iterator().next();
+         IQueryParameterType param = andList.get(0).get(0);
+
+         if (param instanceof TokenParam) {
+            buildHashPredicateFromTokenParam((TokenParam) param, theRequestPartitionId, next, systemAndValueHashes, valueHashes);
+         }
+      }
+   }
+
+   preFetchSearchParameterMapsToken("myHashSystemAndValue", systemAndValueHashes, theTransactionDetails, theRequestPartitionId, theInputParameters, theOutputPidsToLoadFully);
+   preFetchSearchParameterMapsToken("myHashValue", valueHashes, theTransactionDetails, theRequestPartitionId, theInputParameters, theOutputPidsToLoadFully);
+
+   //For each SP Map which did not return a result, tag it as not found.
+   if (!valueHashes.isEmpty() || !systemAndValueHashes.isEmpty()) {
+      theInputParameters.stream()
+         // No matches
+         .filter(match -> !match.myResolved)
+         .forEach(match -> {
+            ourLog.debug("Was unable to match url {} from database", match.myRequestUrl);
+            theTransactionDetails.addResolvedMatchUrl(myFhirContext, match.myRequestUrl, TransactionDetails.NOT_FOUND);
+         });
+   }
+}
+
+/**
+ * Here we do a select against the {@link ResourceIndexedSearchParamToken} table for any rows that have the
+ * specific sys+val or val hashes we know we need to pre-fetch.
+ * <p>
+ * Note that we do a tuple query for only 2 columns in order to ensure that we can get by with only
+ * the data in the index (ie no need to load the actual table rows).
+ */
+private void preFetchSearchParameterMapsToken(String theIndexColumnName, Set<Long> theHashesForIndexColumn, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId, List<MatchUrlToResolve> theInputParameters, List<Long> theOutputPidsToLoadFully) {
+   if (!theHashesForIndexColumn.isEmpty()) {
+      ListMultimap<Long, MatchUrlToResolve> hashToSearchMap = buildHashToSearchMap(theInputParameters, theIndexColumnName);
+      CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
+      CriteriaQuery<Tuple> cq = cb.createTupleQuery();
+      Root<ResourceIndexedSearchParamToken> from = cq.from(ResourceIndexedSearchParamToken.class);
+      cq.multiselect(from.get("myResourcePid").as(Long.class), from.get(theIndexColumnName).as(Long.class));
+
+      Predicate masterPredicate;
+      if (theHashesForIndexColumn.size() == 1) {
+         masterPredicate = cb.equal(from.get(theIndexColumnName).as(Long.class), theHashesForIndexColumn.iterator().next());
+      } else {
+         masterPredicate = from.get(theIndexColumnName).as(Long.class).in(theHashesForIndexColumn);
+      }
+
+      if (myPartitionSettings.isPartitioningEnabled() && !myPartitionSettings.isIncludePartitionInSearchHashes()) {
+         if (theRequestPartitionId.isDefaultPartition()) {
+            Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue").as(Integer.class));
+            masterPredicate = cb.and(partitionIdCriteria, masterPredicate);
+         } else if (!theRequestPartitionId.isAllPartitions()) {
+            Predicate partitionIdCriteria = from.get("myPartitionIdValue").as(Integer.class).in(theRequestPartitionId.getPartitionIds());
+            masterPredicate = cb.and(partitionIdCriteria, masterPredicate);
+         }
+      }
+
+      cq.where(masterPredicate);
+
+      TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
+
+      /*
+       * If we have 10 unique conditional URLs we're resolving, each one should
+       * resolve to 0..1 resources if they are valid as conditional URLs. So we would
+       * expect this query to return 0..10 rows, since conditional URLs for all
+       * conditional operations except DELETE (which isn't being applied here) are
+       * only allowed to resolve to 0..1 resources.
+       *
+       * If a conditional URL matches 2+ resources that is an error, and we'll
+       * be throwing an exception below. This limit is here for safety just to
+       * ensure that if someone uses a conditional URL that matches a million resources,
+       * we don't do a super-expensive fetch.
+       */
+      query.setMaxResults(theHashesForIndexColumn.size() + 1);
+
+      List<Tuple> results = query.getResultList();
+
+      for (Tuple nextResult : results) {
+         Long nextResourcePid = nextResult.get(0, Long.class);
+         Long nextHash = nextResult.get(1, Long.class);
+         List<MatchUrlToResolve> matchedSearch = hashToSearchMap.get(nextHash);
+         matchedSearch.forEach(matchUrl -> {
+            ourLog.debug("Matched url {} from database", matchUrl.myRequestUrl);
+            if (matchUrl.myShouldPreFetchResourceBody) {
+               theOutputPidsToLoadFully.add(nextResourcePid);
+            }
+            myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, matchUrl.myResourceDefinition.getName(), matchUrl.myRequestUrl, JpaPid.fromId(nextResourcePid));
+            theTransactionDetails.addResolvedMatchUrl(myFhirContext, matchUrl.myRequestUrl, JpaPid.fromId(nextResourcePid));
+            matchUrl.setResolved(true);
+         });
+      }
+
+   }
+}
+
+/**
+ * Note that if {@literal theShouldPreFetchResourceBody} is false, then we'll check if a given match
+ * URL resolves to a resource PID, but we won't actually try to load that resource. If we're resolving
+ * a match URL because it's there for a conditional update, we'll eagerly fetch the
+ * actual resource because we need to know its current state in order to update it. However, if
+ * the match URL is from an inline match URL in a resource body, we really only care about
+ * the PID and don't need the body so we don't load it. This does have a security implication, since
+ * it means that the {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_PRESHOW_RESOURCES} pointcut
+ * isn't fired even though the user has resolved the URL (meaning they may be able to test for
+ * the existence of a resource using a match URL). There is a test for this called
+ * {@literal testTransactionCreateInlineMatchUrlWithAuthorizationDenied()}. This security tradeoff
+ * is acceptable since we're only prefetching things with very simple match URLs (nothing with
+ * a reference in it for example) so it's not really possible to doing anything useful with this.
+ *
+ * @param theResourceType The resource type associated with the match URL (ie what resource type should it resolve to)
+ * @param theRequestUrl The actual match URL, which could be as simple as just parameters or could include the resource type too
+ * @param theShouldPreFetchResourceBody Should we also fetch the actual resource body, or just figure out the PID associated with it. See the method javadoc above for some context.
+ * @param theOutputIdsToPreFetch This will be populated with any resource PIDs that need to be pre-fetched
+ * @param theOutputSearchParameterMapsToResolve This will be populated with any {@link SearchParameterMap} instances corresponding to match URLs we need to resolve
+ */
+private void preFetchConditionalUrl(String theResourceType, String theRequestUrl, boolean theShouldPreFetchResourceBody, List<Long> theOutputIdsToPreFetch, List<MatchUrlToResolve> theOutputSearchParameterMapsToResolve) {
+   JpaPid cachedId = myMatchResourceUrlService.processMatchUrlUsingCacheOnly(theResourceType, theRequestUrl);
+   if (cachedId != null) {
+      if (theShouldPreFetchResourceBody) {
+         theOutputIdsToPreFetch.add(cachedId.getId());
+      }
+   } else if (SINGLE_PARAMETER_MATCH_URL_PATTERN.matcher(theRequestUrl).matches()) {
+      RuntimeResourceDefinition resourceDefinition = myFhirContext.getResourceDefinition(theResourceType);
+      SearchParameterMap matchUrlSearchMap = myMatchUrlService.translateMatchUrl(theRequestUrl, resourceDefinition);
+      theOutputSearchParameterMapsToResolve.add(new MatchUrlToResolve(theRequestUrl, matchUrlSearchMap, resourceDefinition, theShouldPreFetchResourceBody));
+   }
+}
+
+private RequestPartitionId getSinglePartitionForAllEntriesOrNull(RequestDetails theRequest, List<IBase> theEntries, ITransactionProcessorVersionAdapter versionAdapter) {
+   RequestPartitionId retVal = null;
+   Set<RequestPartitionId> requestPartitionIdsForAllEntries = new HashSet<>();
+   for (IBase nextEntry : theEntries) {
+      IBaseResource resource = versionAdapter.getResource(nextEntry);
+      if (resource != null) {
+         RequestPartitionId requestPartition = myRequestPartitionSvc.determineCreatePartitionForRequest(theRequest, resource, myFhirContext.getResourceType(resource));
+         requestPartitionIdsForAllEntries.add(requestPartition);
+      }
+   }
+   if (requestPartitionIdsForAllEntries.size() == 1) {
+      retVal = requestPartitionIdsForAllEntries.iterator().next();
+   }
+   return retVal;
+}
+
 /**
  * Given a token parameter, build the query predicate based on its hash. Uses system and value if both are available, otherwise just value.
  * If neither are available, it returns null.
  */
 @Nullable
-private Predicate buildHashPredicateFromTokenParam(TokenParam theTokenParam, RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root<ResourceIndexedSearchParamToken> from, MatchUrlToResolve theMatchUrl) {
+private void buildHashPredicateFromTokenParam(TokenParam theTokenParam, RequestPartitionId theRequestPartitionId, MatchUrlToResolve theMatchUrl, Set<Long> theSysAndValuePredicates, Set<Long> theValuePredicates) {
-   Predicate hashPredicate = null;
    if (isNotBlank(theTokenParam.getValue()) && isNotBlank(theTokenParam.getSystem())) {
       theMatchUrl.myHashSystemAndValue = ResourceIndexedSearchParamToken.calculateHashSystemAndValue(myPartitionSettings, theRequestPartitionId, theMatchUrl.myResourceDefinition.getName(), theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), theTokenParam.getSystem(), theTokenParam.getValue());
-      hashPredicate = cb.equal(from.get("myHashSystemAndValue").as(Long.class), theMatchUrl.myHashSystemAndValue);
+      theSysAndValuePredicates.add(theMatchUrl.myHashSystemAndValue);
    } else if (isNotBlank(theTokenParam.getValue())) {
      theMatchUrl.myHashValue = ResourceIndexedSearchParamToken.calculateHashValue(myPartitionSettings, theRequestPartitionId, theMatchUrl.myResourceDefinition.getName(), theMatchUrl.myMatchUrlSearchMap.keySet().iterator().next(), theTokenParam.getValue());
-      hashPredicate = cb.equal(from.get("myHashValue").as(Long.class), theMatchUrl.myHashValue);
+      theValuePredicates.add(theMatchUrl.myHashValue);
    }
-   return hashPredicate;
 }

-private Map<Long, List<MatchUrlToResolve>> buildHashToSearchMap(List<MatchUrlToResolve> searchParameterMapsToResolve) {
+private ListMultimap<Long, MatchUrlToResolve> buildHashToSearchMap(List<MatchUrlToResolve> searchParameterMapsToResolve, String theIndex) {
-   Map<Long, List<MatchUrlToResolve>> hashToSearch = new HashMap<>();
+   ListMultimap<Long, MatchUrlToResolve> hashToSearch = ArrayListMultimap.create();
    //Build a lookup map so we don't have to iterate over the searches repeatedly.
    for (MatchUrlToResolve nextSearchParameterMap : searchParameterMapsToResolve) {
-      if (nextSearchParameterMap.myHashSystemAndValue != null) {
+      if (nextSearchParameterMap.myHashSystemAndValue != null && theIndex.equals("myHashSystemAndValue")) {
-         List<MatchUrlToResolve> matchUrlsToResolve = hashToSearch.getOrDefault(nextSearchParameterMap.myHashSystemAndValue, new ArrayList<>());
-         matchUrlsToResolve.add(nextSearchParameterMap);
-         hashToSearch.put(nextSearchParameterMap.myHashSystemAndValue, matchUrlsToResolve);
+         hashToSearch.put(nextSearchParameterMap.myHashSystemAndValue, nextSearchParameterMap);
      }
-      if (nextSearchParameterMap.myHashValue!= null) {
+      if (nextSearchParameterMap.myHashValue != null && theIndex.equals("myHashValue")) {
-         List<MatchUrlToResolve> matchUrlsToResolve = hashToSearch.getOrDefault(nextSearchParameterMap.myHashValue, new ArrayList<>());
-         matchUrlsToResolve.add(nextSearchParameterMap);
-         hashToSearch.put(nextSearchParameterMap.myHashValue, matchUrlsToResolve);
+         hashToSearch.put(nextSearchParameterMap.myHashValue, nextSearchParameterMap);
      }
    }
    return hashToSearch;
 }

-private void setSearchToResolvedAndPrefetchFoundResourcePid(TransactionDetails theTransactionDetails, List<Long> idsToPreFetch, ResourceIndexedSearchParamToken nextResult, MatchUrlToResolve nextSearchParameterMap) {
-   ourLog.debug("Matched url {} from database", nextSearchParameterMap.myRequestUrl);
-   idsToPreFetch.add(nextResult.getResourcePid());
-   myMatchResourceUrlService.matchUrlResolved(theTransactionDetails, nextSearchParameterMap.myResourceDefinition.getName(), nextSearchParameterMap.myRequestUrl, JpaPid.fromId(nextResult.getResourcePid()));
-   theTransactionDetails.addResolvedMatchUrl(nextSearchParameterMap.myRequestUrl, JpaPid.fromId(nextResult.getResourcePid()));
-   nextSearchParameterMap.setResolved(true);
-}
-
 @Override
 protected void flushSession(Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome) {
    try {
@@ -372,15 +472,18 @@ public class TransactionProcessor extends BaseTransactionProcessor {
    private final String myRequestUrl;
    private final SearchParameterMap myMatchUrlSearchMap;
    private final RuntimeResourceDefinition myResourceDefinition;
+   private final boolean myShouldPreFetchResourceBody;
    public boolean myResolved;
    private Long myHashValue;
    private Long myHashSystemAndValue;

-   public MatchUrlToResolve(String theRequestUrl, SearchParameterMap theMatchUrlSearchMap, RuntimeResourceDefinition theResourceDefinition) {
+   public MatchUrlToResolve(String theRequestUrl, SearchParameterMap theMatchUrlSearchMap, RuntimeResourceDefinition theResourceDefinition, boolean theShouldPreFetchResourceBody) {
       myRequestUrl = theRequestUrl;
       myMatchUrlSearchMap = theMatchUrlSearchMap;
       myResourceDefinition = theResourceDefinition;
+      myShouldPreFetchResourceBody = theShouldPreFetchResourceBody;
    }

    public void setResolved(boolean theResolved) {
       myResolved = theResolved;
    }
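For readers unfamiliar with JPA tuple queries, the prefetch above boils down to the pattern sketched below. This is an illustrative restatement rather than code from the commit: it assumes an externally provided EntityManager, and the entity and field names ("myResourcePid", "myHashSystemAndValue") come straight from the diff above.

import java.util.List;
import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;

public class TokenHashPrefetchSketch {

   /**
    * Returns (resource PID, hash) pairs for the given token hashes. Only two columns are
    * selected so the database can typically answer the query from the index alone.
    */
   static List<Tuple> fetchPidsForHashes(EntityManager theEntityManager, Set<Long> theHashes) {
      CriteriaBuilder cb = theEntityManager.getCriteriaBuilder();
      CriteriaQuery<Tuple> cq = cb.createTupleQuery();
      Root<ResourceIndexedSearchParamToken> from = cq.from(ResourceIndexedSearchParamToken.class);

      cq.multiselect(
         from.get("myResourcePid").as(Long.class),
         from.get("myHashSystemAndValue").as(Long.class));
      cq.where(from.get("myHashSystemAndValue").as(Long.class).in(theHashes));

      TypedQuery<Tuple> query = theEntityManager.createQuery(cq);
      // Each well-formed conditional URL should match at most one row; the +1 cap guards
      // against a runaway match URL producing a huge result set
      query.setMaxResults(theHashes.size() + 1);
      return query.getResultList();
   }
}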
@@ -60,6 +60,7 @@ import javax.annotation.Nullable;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
 import javax.persistence.PersistenceContextType;
+import javax.persistence.Tuple;
 import javax.persistence.TypedQuery;
 import javax.persistence.criteria.CriteriaBuilder;
 import javax.persistence.criteria.CriteriaQuery;
@@ -345,9 +346,22 @@ public class IdHelperService implements IIdHelperService<JpaPid> {

 private void doResolvePersistentIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds, List<JpaPid> theOutputListToPopulate) {
    CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
-   CriteriaQuery<ForcedId> criteriaQuery = cb.createQuery(ForcedId.class);
+   CriteriaQuery<Tuple> criteriaQuery = cb.createTupleQuery();
    Root<ForcedId> from = criteriaQuery.from(ForcedId.class);
+
+   /*
+    * We don't currently have an index that satisfies these three columns, but the
+    * index IDX_FORCEDID_TYPE_FID does include myResourceType and myForcedId
+    * so we're at least minimizing the amount of data we fetch. A largescale test
+    * on Postgres does confirm that this lookup does use the index and is pretty
+    * performant.
+    */
+   criteriaQuery.multiselect(
+      from.get("myResourcePid").as(Long.class),
+      from.get("myResourceType").as(String.class),
+      from.get("myForcedId").as(String.class)
+   );
+
    List<Predicate> predicates = new ArrayList<>(theIds.size());
    for (IIdType next : theIds) {

@@ -366,16 +380,19 @@ public class IdHelperService implements IIdHelperService<JpaPid> {

    criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));

-   TypedQuery<ForcedId> query = myEntityManager.createQuery(criteriaQuery);
+   TypedQuery<Tuple> query = myEntityManager.createQuery(criteriaQuery);
-   List<ForcedId> results = query.getResultList();
+   List<Tuple> results = query.getResultList();
-   for (ForcedId nextId : results) {
+   for (Tuple nextId : results) {
       // Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
-      if (nextId.getResourceId() != null) {
-         JpaPid jpaPid = JpaPid.fromId(nextId.getResourceId());
-         populateAssociatedResourceId(nextId.getResourceType(), nextId.getForcedId(), jpaPid);
+      Long resourceId = nextId.get(0, Long.class);
+      String resourceType = nextId.get(1, String.class);
+      String forcedId = nextId.get(2, String.class);
+      if (resourceId != null) {
+         JpaPid jpaPid = JpaPid.fromId(resourceId);
+         populateAssociatedResourceId(resourceType, forcedId, jpaPid);
         theOutputListToPopulate.add(jpaPid);

-         String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getForcedId());
+         String key = toForcedIdToPidKey(theRequestPartitionId, resourceType, forcedId);
         myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, jpaPid);
      }
    }
@@ -37,6 +37,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
 import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
 import ca.uhn.fhir.jpa.model.entity.StorageSettings;
 import ca.uhn.fhir.util.VersionEnum;
@ -200,6 +201,12 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
||||||
.nullable()
|
.nullable()
|
||||||
.type(ColumnTypeEnum.DATE_ONLY);
|
.type(ColumnTypeEnum.DATE_ONLY);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
version
|
||||||
|
.onTable(ResourceTable.HFJ_RESOURCE)
|
||||||
|
.addColumn("20230323.1", "SEARCH_URL_PRESENT")
|
||||||
|
.nullable()
|
||||||
|
.type(ColumnTypeEnum.BOOLEAN);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected void init640() {
|
protected void init640() {
|
||||||
|
|
|
@@ -298,6 +298,15 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private String myFhirId;

+ /**
+  * Is there a corresponding row in {@link ResourceSearchUrlEntity} for
+  * this row.
+  * TODO: Added in 6.6.0 - Should make this a primitive boolean at some point
+  */
+ @OptimisticLock(excluded = true)
+ @Column(name = "SEARCH_URL_PRESENT", nullable = true)
+ private Boolean mySearchUrlPresent = false;
+
/**
 * Populate myFhirId with server-assigned sequence id when no client-id provided.
 * We eat this complexity during insert to simplify query time with a uniform column.

@@ -694,6 +703,14 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
   myNarrativeText = theNarrativeText;
}

+ public boolean isSearchUrlPresent() {
+    return Boolean.TRUE.equals(mySearchUrlPresent);
+ }
+
+ public void setSearchUrlPresent(boolean theSearchUrlPresent) {
+    mySearchUrlPresent = theSearchUrlPresent;
+ }
+
public ResourceHistoryTable toHistory(boolean theCreateVersionTags) {
   ResourceHistoryTable retVal = new ResourceHistoryTable();
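The getter above deliberately null-safes the Boolean column, since rows created before this migration will hold NULL. A hedged sketch of how a caller could use the flag to skip the per-update DELETE against HFJ_RES_SEARCH_URL follows; the helper name is invented for illustration and is not the actual HAPI FHIR internal API, but the behaviour matches the new query-count tests later in this diff.

```java
// Illustrative only: the cleanup is conditional on the flag, so plain creates
// and resources written before the migration never pay for a DELETE.
void cleanUpSearchUrl(ResourceTable theEntity) {
   if (theEntity.isSearchUrlPresent()) { // false for NULL (pre-migration rows) and non-conditional creates
      deleteSearchUrlRowFor(theEntity);  // hypothetical helper that issues the DELETE
      theEntity.setSearchUrlPresent(false);
   }
}
```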
@@ -381,7 +381,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
   mySystemDao.transaction(mySrd, request);
   fail();
} catch (PreconditionFailedException e) {
-  assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+  assertThat(e.getMessage(), containsString("Multiple resources match this search"));
}
}

@@ -1350,7 +1350,7 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
   mySystemDao.transaction(mySrd, request);
   fail();
} catch (PreconditionFailedException e) {
-  assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+  assertThat(e.getMessage(), containsString("Multiple resources match this search"));
}
}
@@ -3566,7 +3566,7 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
assertEquals(10, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countInsertQueries());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
String unformattedSql = myCaptureQueriesListener.getUpdateQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(unformattedSql, stringContainsInOrder(
   "SRC_PATH='Observation.performer'",
@@ -829,7 +829,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
   mySystemDao.transaction(mySrd, request);
   fail();
} catch (PreconditionFailedException e) {
-  assertEquals(Msg.code(1092) + "Invalid match URL \"Patient?identifier=urn%3Asystem%7CtestTransactionCreateInlineMatchUrlWithTwoMatches\" - Multiple resources match this search", e.getMessage());
+  assertEquals(Msg.code(2207) + "Invalid match URL \"Patient?identifier=urn%3Asystem%7CtestTransactionCreateInlineMatchUrlWithTwoMatches\" - Multiple resources match this search", e.getMessage());
}
}

@@ -905,7 +905,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
   mySystemDao.transaction(mySrd, request);
   fail();
} catch (PreconditionFailedException e) {
-  assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+  assertThat(e.getMessage(), containsString("Multiple resources match this search"));
}
}

@@ -2052,7 +2052,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
   mySystemDao.transaction(mySrd, request);
   fail();
} catch (PreconditionFailedException e) {
-  assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+  assertThat(e.getMessage(), containsString("Multiple resources match this search"));
}
}

@@ -2451,7 +2451,7 @@ public class FhirSystemDaoDstu3Test extends BaseJpaDstu3SystemTest {
   mySystemDao.transaction(mySrd, bundle);
   fail();
} catch (PreconditionFailedException e) {
-  assertEquals(Msg.code(1092) + "Invalid match URL \"Patient?identifier=http://www.ghh.org/identifiers|condreftestpatid1\" - Multiple resources match this search", e.getMessage());
+  assertEquals(Msg.code(2207) + "Invalid match URL \"Patient?identifier=http://www.ghh.org/identifiers|condreftestpatid1\" - Multiple resources match this search", e.getMessage());
}

}
@@ -66,7 +66,7 @@ public class ConsumeFilesStepR4Test extends BaseJpaR4Test {
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(2, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());

@@ -115,7 +115,7 @@ public class ConsumeFilesStepR4Test extends BaseJpaR4Test {
assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(2, myCaptureQueriesListener.logInsertQueries());
assertEquals(4, myCaptureQueriesListener.logUpdateQueries());
- assertEquals(2, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
+ import ca.uhn.fhir.jpa.api.IDaoRegistry;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.config.ThreadPoolFactoryConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;

@@ -26,6 +27,8 @@ import org.hl7.fhir.r4.model.Meta;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
+ import org.junit.jupiter.params.ParameterizedTest;
+ import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.Answers;
import org.mockito.Spy;
import org.slf4j.Logger;

@@ -45,6 +48,7 @@ import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;

import static org.junit.jupiter.api.Assertions.assertEquals;
+ import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;

@@ -59,6 +63,8 @@ public class TransactionProcessorTest {
@Autowired
private TransactionProcessor myTransactionProcessor;
@MockBean
+ private DaoRegistry myDaoRegistry;
+ @MockBean
private EntityManagerFactory myEntityManagerFactory;
@MockBean(answer = Answers.RETURNS_DEEP_STUBS)
private EntityManager myEntityManager;

@@ -115,6 +121,7 @@ public class TransactionProcessorTest {
   }
}

@Configuration
@Import(ThreadPoolFactoryConfig.class)
public static class MyConfig {
@@ -28,6 +28,7 @@ import javax.servlet.ServletException;
import java.time.LocalDate;
import java.time.Month;
import java.util.ArrayList;
+ import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

@@ -207,15 +208,20 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
public RequestPartitionId partitionIdentifyRead(ServletRequestDetails theRequestDetails) {

+  // Just to be nice, figure out the first line in the stack that isn't a part of the
+  // partitioning or interceptor infrastructure, just so it's obvious who is asking
+  // for a partition ID
   String stack;
   try {
      throw new Exception();
   } catch (Exception e) {
      stack = StackTraceHelper.getStackAsString(e);
-     int lastWantedNewLine = StringUtils.ordinalIndexOf(stack, "\n", 25);
-     if (lastWantedNewLine != -1) {
-        stack = stack.substring(0, lastWantedNewLine);
-     }
+     stack = Arrays.stream(stack.split("\\n"))
+        .filter(t->t.contains("ca.uhn.fhir"))
+        .filter(t->!t.toLowerCase().contains("interceptor"))
+        .filter(t->!t.toLowerCase().contains("partitionhelper"))
+        .findFirst()
+        .orElse("UNKNOWN");
   }

   RequestPartitionId retVal = myReadRequestPartitionIds.remove(0);
@@ -218,6 +218,7 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
   super(theName);
}

+ @Override
public void invoke(IPointcut thePointcut, HookParams theArgs) {
   doInvoke(thePointcut, theArgs);
}
@@ -130,37 +130,14 @@ public class FhirResourceDaoR4ConcurrentWriteTest extends BaseJpaR4Test {
 * Make a transaction with conditional updates that will fail due to
 * constraint errors and be retried automatically. Make sure that the
 * retry succeeds and that the data ultimately gets written.
+ *
+ * This test used to use a composite unique search parameter, but
+ * can now rely on the {@link ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity}
+ * instead.
 */
@Test
public void testTransactionCreates_WithRetry() throws ExecutionException, InterruptedException {
myInterceptorRegistry.registerInterceptor(myRetryInterceptor);
- myStorageSettings.setUniqueIndexesEnabled(true);
-
- // Create a unique search parameter to enfore uniqueness
- // TODO: remove this once we have a better way to enfore these
- SearchParameter sp = new SearchParameter();
- sp.setId("SearchParameter/Practitioner-identifier");
- sp.setType(Enumerations.SearchParamType.TOKEN);
- sp.setCode("identifier");
- sp.setExpression("Practitioner.identifier");
- sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
- sp.addBase("Practitioner");
- mySearchParameterDao.update(sp);
-
- sp = new SearchParameter();
- sp.setId("SearchParameter/Practitioner-identifier-unique");
- sp.setType(Enumerations.SearchParamType.COMPOSITE);
- sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
- sp.addBase("Practitioner");
- sp.addComponent()
-    .setExpression("Practitioner")
-    .setDefinition("SearchParameter/Practitioner-identifier");
- sp.addExtension()
-    .setUrl(HapiExtensions.EXT_SP_UNIQUE)
-    .setValue(new BooleanType(true));
- mySearchParameterDao.update(sp);
-
- mySearchParamRegistry.forceRefresh();
-
AtomicInteger setCounter = new AtomicInteger(0);
AtomicInteger fuzzCounter = new AtomicInteger(0);

@@ -191,9 +168,9 @@ public class FhirResourceDaoR4ConcurrentWriteTest extends BaseJpaR4Test {

assertEquals(1, counts.get("Patient"), counts.toString());
assertEquals(1, counts.get("Observation"), counts.toString());
- assertEquals(7, myResourceLinkDao.count()); // 1 for SP, 6 for transaction
+ assertEquals(6, myResourceLinkDao.count());
- assertEquals(8, myResourceTableDao.count()); // 2 SPs, 6 resources
+ assertEquals(6, myResourceTableDao.count());
- assertEquals(16, myResourceHistoryTableDao.count());
+ assertEquals(14, myResourceHistoryTableDao.count());
});

}
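The new javadoc above says the retry test now relies on ResourceSearchUrlEntity instead of a composite unique search parameter. A minimal hedged sketch of exercising that guard through the conditional-create DAO API that appears later in this diff follows; myPatientDao and mySrd are assumed test fixtures, and the exact outcome of the second call (match versus constraint-driven retry) depends on timing, which is precisely what the concurrency test exercises.

```java
// Sketch only, under the assumptions stated above.
Patient p = new Patient();
p.addIdentifier().setSystem("http://foo").setValue("123");

// The first conditional create writes the resource and records its match URL
// in HFJ_RES_SEARCH_URL (the ResourceSearchUrlEntity table).
IIdType id1 = myPatientDao.create(p, "Patient?identifier=http://foo|123", mySrd).getId();

// A repeated or concurrent conditional create with the same match URL is now
// constrained by that table rather than by a composite unique search parameter.
IIdType id2 = myPatientDao.create(p, "Patient?identifier=http://foo|123", mySrd).getId();
```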
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;

@@ -44,6 +45,7 @@ import org.hl7.fhir.r4.model.Narrative;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
+ import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.ServiceRequest;

@@ -68,6 +70,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;

+ import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;

@@ -133,7 +136,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread(), empty());
- assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
+ assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@@ -159,7 +162,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
- assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
+ assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@Test

@@ -178,7 +181,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
group = updateGroup(group, patientList.subList(initialPatientsCount, allPatientsCount));

- assertQueryCount(10, 1, 2, 1);
+ assertQueryCount(10, 1, 2, 0);

assertEquals(allPatientsCount, group.getMember().size());

@@ -200,7 +203,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
group = updateGroup(group, Collections.emptyList());

myCaptureQueriesListener.logSelectQueries();
- assertQueryCount(5, 1, 2, 1);
+ assertQueryCount(5, 1, 2, 0);

assertEquals(30, group.getMember().size());

@@ -237,7 +240,69 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
- assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
+ assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
+ }
+
+ @Test
+ public void testUpdate_DeletesSearchUrlOnlyWhenPresent() {
+    Patient p = new Patient();
+    p.setActive(false);
+    p.addIdentifier().setSystem("http://foo").setValue("123");
+
+    myCaptureQueriesListener.clear();
+    IIdType id = myPatientDao.create(p, "Patient?identifier=http://foo|123", mySrd).getId();
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(1L, id.getVersionIdPartAsLong());
+
+    // Update 1 - Should delete search URL
+    p.setActive(true);
+    myCaptureQueriesListener.clear();
+    id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId();
+    assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(2L, id.getVersionIdPartAsLong());
+
+    // Update 2 - Should not try to delete search URL
+    p.setActive(false);
+    myCaptureQueriesListener.clear();
+    id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId();
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(3L, id.getVersionIdPartAsLong());
+ }
+
+ @Test
+ public void testUpdate_DeletesSearchUrlOnlyWhenPresent_NonConditional() {
+    Patient p = new Patient();
+    p.setActive(false);
+    p.addIdentifier().setSystem("http://foo").setValue("123");
+
+    myCaptureQueriesListener.clear();
+    IIdType id = myPatientDao.create(p, mySrd).getId();
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(1L, id.getVersionIdPartAsLong());
+
+    // Update 1 - Should not try to delete search URL since none should exist
+    p.setActive(true);
+    myCaptureQueriesListener.clear();
+    id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId();
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(2L, id.getVersionIdPartAsLong());
+
+    // Update 2 - Should not try to delete search URL
+    p.setActive(false);
+    myCaptureQueriesListener.clear();
+    id = myPatientDao.update(p, "Patient?identifier=http://foo|123", mySrd).getId();
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
+    assertEquals(3L, id.getVersionIdPartAsLong());
}
@@ -543,7 +608,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
- assertEquals(1, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
+ assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());

// Third time (caches all loaded by now)

@@ -661,9 +726,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
public void assertNoPartitionSelectors() {
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
for (SqlQuery next : selectQueries) {
-  assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id is null"), () -> next.getSql(true, true));
+  assertEquals(0, countMatches(next.getSql(true, true).toLowerCase(), "partition_id is null"), () -> next.getSql(true, true));
-  assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id="), () -> next.getSql(true, true));
+  assertEquals(0, countMatches(next.getSql(true, true).toLowerCase(), "partition_id="), () -> next.getSql(true, true));
-  assertEquals(0, StringUtils.countMatches(next.getSql(true, true).toLowerCase(), "partition_id ="), () -> next.getSql(true, true));
+  assertEquals(0, countMatches(next.getSql(true, true).toLowerCase(), "partition_id ="), () -> next.getSql(true, true));
}
}

@@ -1058,7 +1123,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true).toLowerCase();
- assertEquals(1, StringUtils.countMatches(sql, "join"), sql);
+ assertEquals(1, countMatches(sql, "join"), sql);
}

@@ -1231,7 +1296,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, createTransactionWithCreatesAndOneMatchUrl());
- myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// 1 lookup for the match URL only
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertEquals(19, myCaptureQueriesListener.countInsertQueries());

@@ -1426,7 +1490,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(3, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

/*
 * Third time with mass ingestion mode enabled

@@ -1442,7 +1506,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(2, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

}

@@ -1509,7 +1573,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(7, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(3, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

/*
 * Third time with mass ingestion mode enabled

@@ -1525,7 +1589,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

}
@@ -1553,6 +1617,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+ assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), "'6445233466262474106'"));
+ assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false), "'LOC'"));
assertEquals(6, runInTransaction(() -> myResourceTableDao.count()));

// Second identical pass

@@ -1600,7 +1666,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
- assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+ assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6, runInTransaction(() -> myResourceTableDao.count()));

// Second identical pass

@@ -1783,7 +1849,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Bundle outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries();
- assertEquals(1, myCaptureQueriesListener.countSelectQueries());
+ // One to prefetch sys+val, one to prefetch val
+ assertEquals(2, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries();
assertEquals(45, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();

@@ -1798,12 +1865,12 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries();
- assertEquals(8, myCaptureQueriesListener.countSelectQueries());
+ assertEquals(9, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(8, myCaptureQueriesListener.countUpdateQueries());
- assertEquals(4, myCaptureQueriesListener.countDeleteQueries());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

/*
 * Third time with mass ingestion mode enabled

@@ -1815,7 +1882,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueries();
- assertEquals(7, myCaptureQueriesListener.countSelectQueries());
+ assertEquals(8, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();

@@ -1946,7 +2013,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

// Make sure the match URL query uses a small limit
String matchUrlQuery = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, true);
- assertThat(matchUrlQuery, containsString("t0.HASH_SYS_AND_VALUE = '-4132452001562191669'"));
+ assertThat(matchUrlQuery, containsString("HASH_SYS_AND_VALUE='-4132452001562191669'"));
assertThat(matchUrlQuery, containsString("limit '2'"));

runInTransaction(() -> {

@@ -2041,7 +2108,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}
@@ -2541,7 +2608,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
- assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+ assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

@@ -2598,9 +2665,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
output = mySystemDao.transaction(mySrd, input);
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(output));

- // Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
- assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+ assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@@ -2663,6 +2729,60 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
}

+ /**
+  * This test runs a transaction bundle that has a large number of inline match URLs,
+  * as well as a large number of updates (PUT). This means that a lot of URLs and resources
+  * need to be resolved (ie SQL SELECT) in order to proceed with the transaction. Prior
+  * to the optimization that introduced this test, we had 140 SELECTs, now it's 17.
+  */
+ @Test
+ public void testTransactionWithManyInlineMatchUrls() throws IOException {
+    myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
+
+    Bundle input = loadResource(myFhirContext, Bundle.class, "/r4/test-patient-bundle.json");
+
+    myCaptureQueriesListener.clear();
+    Bundle output = mySystemDao.transaction(mySrd, input);
+    myCaptureQueriesListener.logSelectQueries();
+
+    assertEquals(17, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+    assertEquals(6607, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+    assertEquals(418, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+    assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+    assertEquals(2, myCaptureQueriesListener.countCommits());
+    assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+    assertEquals(input.getEntry().size(), output.getEntry().size());
+
+    runInTransaction(()->{
+       assertEquals(437, myResourceTableDao.count());
+       assertEquals(437, myResourceHistoryTableDao.count());
+    });
+ }
+
+ @Test
+ public void testTransactionWithClientAssignedId() {
+    BundleBuilder bb = new BundleBuilder(myFhirContext);
+
+    for (int i = 0; i < 5; i++) {
+       Provenance prov = new Provenance();
+       prov.setId(IdType.newRandomUuid());
+       prov.setOccurred(new DateTimeType("2022"));
+       bb.addTransactionUpdateEntry(prov).conditional("Provenance/Patient-0d3b0c98-048e-4111-b804-d1c6c7816d5e-" + i);
+    }
+
+    Bundle input = bb.getBundleTyped();
+
+    // input.getEntry().get(0).
+
+    myCaptureQueriesListener.clear();
+    mySystemDao.transaction(mySrd, input);
+    assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+
+ }
+
@Test
public void testValueSetExpand_NotPreExpanded_UseHibernateSearch() {
createLocalCsAndVs();
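As a quick orientation for the two new tests above: the bundles they submit are ordinary FHIR transaction bundles whose entries carry conditional (match) URLs, and the optimization batches the SELECTs needed to resolve those URLs. A minimal sketch using the same BundleBuilder API the second test uses follows; fixture names such as myFhirContext, mySystemDao and mySrd are assumed from the surrounding test class.

```java
// Sketch only: each conditional entry contributes a match URL that the
// transaction processor can now resolve with a small number of batched
// prefetch SELECTs rather than one lookup per entry.
BundleBuilder bb = new BundleBuilder(myFhirContext);

Patient pt = new Patient();
pt.addIdentifier().setSystem("http://foo").setValue("bar-1");
// Update-if-exists / create-if-missing, keyed on the match URL:
bb.addTransactionUpdateEntry(pt).conditional("Patient?identifier=http://foo|bar-1");

Bundle input = bb.getBundleTyped();
mySystemDao.transaction(mySrd, input);
```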
@@ -2845,7 +2965,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(7, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
- assertEquals(3, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
}

@@ -2888,7 +3008,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
- assertEquals(7, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

}
@@ -1146,7 +1146,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
}

@Test

@@ -1216,7 +1216,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());

@@ -1278,7 +1278,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.getNameFirstRep().addGiven("GIVEN1C");
myPatientDao.update(pt);

- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
}

@@ -1307,7 +1307,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.addName().setFamily("FAMILY2");
myPatientDao.update(pt);

- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE

@@ -1355,7 +1355,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
 */
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE

@@ -1413,7 +1413,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
pt.getManagingOrganization().setReference(orgId2.getValue());
myPatientDao.update(pt);

- assertEquals(1, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+ assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread()); // Add an entry to HFJ_RES_VER
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread()); // Update SPIDX_STRING and HFJ_RESOURCE
@@ -39,7 +39,9 @@ import ca.uhn.fhir.util.BundleBuilder;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.io.IOUtils;
import org.hamcrest.Matchers;
+ import org.hibernate.envers.query.AuditEntity;
import org.hl7.fhir.instance.model.api.IAnyResource;
+ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.AllergyIntolerance;
import org.hl7.fhir.r4.model.Appointment;

@@ -72,6 +74,7 @@ import org.hl7.fhir.r4.model.OperationOutcome.IssueSeverity;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
+ import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;

@@ -143,6 +146,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets());
myStorageSettings.setPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets(defaults.isPopulateIdentifierInAutoCreatedPlaceholderReferenceTargets());
myStorageSettings.setAutoVersionReferenceAtPaths(defaults.getAutoVersionReferenceAtPaths());
+ myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets());

myFhirContext.getParserOptions().setAutoContainReferenceTargetsWithNoId(true);
}
|
@@ -1488,24 +1492,48 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 		}
 	}
 
+	/**
+	 * This test is testing whether someone can sneakily figure out the existence of a resource
+	 * by creating a match URL that references it, even though the user doesn't have permission
+	 * to see that resource.
+	 * <p>
+	 * This security check requires a match URL that is too complex for the pre-fetching that
+	 * happens in {@link ca.uhn.fhir.jpa.dao.TransactionProcessor}'s preFetchConditionalUrl
+	 * method (see the javadoc on that method for more details).
+	 */
 	@Test
 	public void testTransactionCreateInlineMatchUrlWithAuthorizationDenied() {
 		// setup
-		String methodName = "testTransactionCreateInlineMatchUrlWithAuthorizationDenied";
-		Bundle request = new Bundle();
 
 		myStorageSettings.setAllowInlineMatchUrlReferences(true);
 
-		Patient p = new Patient();
-		p.addIdentifier().setSystem("urn:system").setValue(methodName);
-		p.setId("Patient/" + methodName);
-		IIdType id = myPatientDao.update(p, mySrd).getId();
-		ourLog.info("Created patient, got it: {}", id);
+		// Let's create a sensitive observation - Nobody must know we caught COVID!
+		Patient patient = new Patient();
+		patient.setId("J");
+		patient.addName().setFamily("Corona").addGiven("John");
+		myPatientDao.update(patient, mySrd);
 
-		Observation o = new Observation();
-		o.getCode().setText("Some Observation");
-		o.getSubject().setReference("Patient?identifier=urn%3Asystem%7C" + methodName);
-		request.addEntry().setResource(o).getRequest().setMethod(HTTPVerb.POST);
+		Observation obs = new Observation();
+		obs.setId("Observation/O");
+		obs.setStatus(ObservationStatus.FINAL);
+		obs.setSubject(new Reference("Patient/J"));
+		obs.getCode().addCoding()
+			.setSystem("http://loinc.org")
+			.setCode("94505-5")
+			.setDisplay("SARS-CoV-2 (COVID-19) IgG Ab [Units/volume] in Serum or Plasma by Immunoassay");
+		obs.setValue(new Quantity()
+			.setValue(284L)
+			.setCode("[arb'U]/ml")
+			.setSystem("http://unitsofmeasure.org"));
+		myObservationDao.update(obs, mySrd);
+
+		// Create a bundle that tries to sneakily link to the
+		// patient's covid test
+		Bundle request = new Bundle();
+
+		Observation sneakyObs = new Observation();
+		sneakyObs.setSubject(new Reference("Patient/J"));
+		sneakyObs.addHasMember(new Reference("Observation?patient=Patient/J&code=http://loinc.org|94505-5"));
+		request.addEntry().setResource(sneakyObs).getRequest().setMethod(HTTPVerb.POST);
 
 		when(mySrd.getRestOperationType()).thenReturn(RestOperationTypeEnum.TRANSACTION);
 
@@ -1528,7 +1556,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 			// verify
 			fail();
 		} catch (ResourceNotFoundException e) {
-			assertEquals(Msg.code(1091) + "Invalid match URL \"Patient?identifier=urn%3Asystem%7CtestTransactionCreateInlineMatchUrlWithAuthorizationDenied\" - No resources match this search", e.getMessage());
+			assertEquals(Msg.code(1091) + "Invalid match URL \"Observation?patient=Patient/J&code=http://loinc.org|94505-5\" - No resources match this search", e.getMessage());
 		} finally {
 			myInterceptorRegistry.unregisterInterceptor(interceptor);
 		}
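For context on what this test defends against: an "inline match URL" is a reference expressed as a search expression instead of a resource ID, and the server resolves it when the transaction is processed. The snippet below is a minimal standalone sketch of that bundle shape, using only R4 model calls already exercised in the test above; the identifier system and value are illustrative placeholders, not values from this commit.

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Reference;

public class InlineMatchUrlBundleSketch {

	// Builds a transaction whose Observation points at its subject via a match URL.
	// The JPA server resolves "Patient?identifier=..." to a concrete Patient at
	// transaction processing time, or rejects the entry if nothing matches.
	static Bundle buildRequest() {
		Observation obs = new Observation();
		obs.getCode().setText("Some Observation");
		obs.setSubject(new Reference("Patient?identifier=urn:system|example-value"));

		Bundle request = new Bundle();
		request.setType(Bundle.BundleType.TRANSACTION);
		request.addEntry()
			.setResource(obs)
			.getRequest()
			.setMethod(Bundle.HTTPVerb.POST)
			.setUrl("Observation");
		return request;
	}
}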
@@ -1904,7 +1932,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 			mySystemDao.transaction(mySrd, request);
 			fail();
 		} catch (PreconditionFailedException e) {
-			assertEquals(Msg.code(1092) + "Invalid match URL \"Patient?identifier=urn%3Asystem%7CtestTransactionCreateInlineMatchUrlWithTwoMatches\" - Multiple resources match this search", e.getMessage());
+			assertEquals(Msg.code(2207) + "Invalid match URL \"Patient?identifier=urn%3Asystem%7CtestTransactionCreateInlineMatchUrlWithTwoMatches\" - Multiple resources match this search", e.getMessage());
 		}
 	}
 
@@ -1980,7 +2008,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 			mySystemDao.transaction(mySrd, request);
 			fail();
 		} catch (PreconditionFailedException e) {
-			assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+			assertThat(e.getMessage(), containsString("Multiple resources match this search"));
 		}
 	}
 
@@ -3534,7 +3562,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 			mySystemDao.transaction(mySrd, request);
 			fail();
 		} catch (PreconditionFailedException e) {
-			assertThat(e.getMessage(), containsString("with match URL \"Patient"));
+			assertThat(e.getMessage(), containsString("Multiple resources match this search"));
 		}
 	}
 
@@ -4080,7 +4108,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
 			mySystemDao.transaction(mySrd, bundle);
 			fail();
 		} catch (PreconditionFailedException e) {
-			assertEquals(Msg.code(1092) + "Invalid match URL \"Patient?identifier=http://www.ghh.org/identifiers|condreftestpatid1\" - Multiple resources match this search", e.getMessage());
+			assertEquals(Msg.code(2207) + "Invalid match URL \"Patient?identifier=http://www.ghh.org/identifiers|condreftestpatid1\" - Multiple resources match this search", e.getMessage());
 		}
 
 	}
@@ -68,6 +68,7 @@ import org.mockito.ArgumentCaptor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.time.LocalDate;
 import java.util.Date;
 import java.util.List;
@@ -118,7 +119,9 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 
 	@AfterEach
 	public void afterEach() {
-		myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(new JpaStorageSettings().isMarkResourcesForReindexingUponSearchParameterChange());
+		JpaStorageSettings defaults = new JpaStorageSettings();
+		myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(defaults.isMarkResourcesForReindexingUponSearchParameterChange());
+		myStorageSettings.setMatchUrlCacheEnabled(defaults.isMatchUrlCacheEnabled());
 	}
 
 	@Test
@@ -2763,8 +2766,9 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 		Bundle outcome = mySystemDao.transaction(mySrd, input.get());
 		ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
 		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
-		assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
-		assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false), containsString("resourcein0_.HASH_SYS_AND_VALUE='-4132452001562191669' and (resourcein0_.PARTITION_ID in ('1'))"));
+		assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false), containsString("esourcein0_.PARTITION_ID in ('1')"));
+		assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false), containsString("HASH_SYS_AND_VALUE in ('7432183691485874662' , '-3772330830566471409'"));
 		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
 		assertEquals(45, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
 		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
@@ -2779,12 +2783,12 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 		outcome = mySystemDao.transaction(mySrd, input.get());
 		ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
 		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
-		assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
 		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
 		assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
 		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
 		assertEquals(8, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
-		assertEquals(4, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
 
 		/*
 		 * Third time with mass ingestion mode enabled
@@ -2796,7 +2800,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 		outcome = mySystemDao.transaction(mySrd, input.get());
 		ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
 		myCaptureQueriesListener.logSelectQueriesForCurrentThread();
-		assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
 		myCaptureQueriesListener.logInsertQueriesForCurrentThread();
 		assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
 		myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTransactionWithManyInlineMatchUrls() throws IOException {
|
||||||
|
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
|
||||||
|
myStorageSettings.setMatchUrlCacheEnabled(true);
|
||||||
|
myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
|
||||||
|
|
||||||
|
SystemRequestDetails requestDetails = new SystemRequestDetails();
|
||||||
|
requestDetails.setRequestPartitionId(RequestPartitionId.fromPartitionId(myPartitionId));
|
||||||
|
|
||||||
|
Bundle input = loadResource(myFhirContext, Bundle.class, "/r4/test-patient-bundle.json");
|
||||||
|
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
Bundle output = mySystemDao.transaction(requestDetails, input);
|
||||||
|
myCaptureQueriesListener.logSelectQueries();
|
||||||
|
|
||||||
|
assertEquals(18, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||||
|
assertEquals(6607, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||||
|
assertEquals(418, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||||
|
assertEquals(2, myCaptureQueriesListener.countCommits());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countRollbacks());
|
||||||
|
|
||||||
|
assertEquals(input.getEntry().size(), output.getEntry().size());
|
||||||
|
|
||||||
|
runInTransaction(()->{
|
||||||
|
assertEquals(437, myResourceTableDao.count());
|
||||||
|
assertEquals(437, myResourceHistoryTableDao.count());
|
||||||
|
});
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Run a second time
|
||||||
|
*/
|
||||||
|
|
||||||
|
requestDetails = new SystemRequestDetails();
|
||||||
|
requestDetails.setRequestPartitionId(RequestPartitionId.fromPartitionId(myPartitionId));
|
||||||
|
|
||||||
|
input = loadResource(myFhirContext, Bundle.class, "/r4/test-patient-bundle.json");
|
||||||
|
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
output = mySystemDao.transaction(requestDetails, input);
|
||||||
|
myCaptureQueriesListener.logSelectQueries();
|
||||||
|
|
||||||
|
assertEquals(29, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||||
|
assertEquals(1, myCaptureQueriesListener.countCommits());
|
||||||
|
assertEquals(0, myCaptureQueriesListener.countRollbacks());
|
||||||
|
|
||||||
|
assertEquals(input.getEntry().size(), output.getEntry().size());
|
||||||
|
|
||||||
|
runInTransaction(()->{
|
||||||
|
assertEquals(437, myResourceTableDao.count());
|
||||||
|
assertEquals(437, myResourceHistoryTableDao.count());
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* JA: I disabled this test - I am not clear on what it was actually trying to test
|
* JA: I disabled this test - I am not clear on what it was actually trying to test
|
||||||
|
|
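A note on the setMatchUrlCacheEnabled(true) call in the new test above: with the cache on, a conditional URL that has been resolved once can be reused without another lookup SELECT in later transactions. The class below is an illustrative, simplified model of that idea only; it is not HAPI's MemoryCacheService, and the use of Long as the storage ID type is an assumption made for the sketch.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public class MatchUrlCacheSketch {

	// Maps a match URL such as "Patient?identifier=http://patient|123" to the
	// storage ID it resolved to the first time it was looked up.
	private final Map<String, Long> myResolvedUrls = new ConcurrentHashMap<>();

	// Runs the (expensive) database lookup only on a cache miss; later calls for
	// the same URL return the cached ID without touching the database.
	public Long resolve(String theMatchUrl, Function<String, Long> theDatabaseLookup) {
		return myResolvedUrls.computeIfAbsent(theMatchUrl, theDatabaseLookup);
	}
}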
File diff suppressed because it is too large
@@ -0,0 +1,386 @@
+package ca.uhn.fhir.jpa.dao.r5;
+
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.util.BundleBuilder;
+import org.hl7.fhir.r5.model.Bundle;
+import org.hl7.fhir.r5.model.IdType;
+import org.hl7.fhir.r5.model.Observation;
+import org.hl7.fhir.r5.model.Patient;
+import org.hl7.fhir.r5.model.Quantity;
+import org.hl7.fhir.r5.model.Reference;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+
+import java.util.UUID;
+
+import static org.apache.commons.lang3.StringUtils.countMatches;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
+
+	@AfterEach
+	public void after() {
+		JpaStorageSettings defaults = new JpaStorageSettings();
+		myStorageSettings.setIndexMissingFields(defaults.getIndexMissingFields());
+		myStorageSettings.setMatchUrlCacheEnabled(defaults.isMatchUrlCacheEnabled());
+		myStorageSettings.setDeleteEnabled(defaults.isDeleteEnabled());
+	}
+
+
+	/**
+	 * If an inline match URL is the same as a conditional create in the same transaction, make sure we
+	 * don't issue a select for it
+	 */
+	@ParameterizedTest(name = "{index}: {0}")
+	@CsvSource({
+		"Match URL Cache Enabled, true",
+		"Match URL Cache Disabled, false"
+	})
+	public void testInlineMatchUrlMatchesConditionalUpdate(@SuppressWarnings("unused") String theName, boolean theMatchUrlCacheEnabled) {
+		// Setup
+		myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
+		myStorageSettings.setMatchUrlCacheEnabled(theMatchUrlCacheEnabled);
+
+		BundleBuilder bb = new BundleBuilder(myFhirContext);
+
+		Observation observation1 = new Observation();
+		observation1.addIdentifier().setSystem("http://observation").setValue("111");
+		observation1.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation1).conditional("Observation?identifier=http://observation|111");
+
+		Patient patient = new Patient();
+		patient.addIdentifier().setSystem("http://patient").setValue("123");
+		bb.addTransactionUpdateEntry(patient).conditional("Patient?identifier=http://patient|123");
+
+		Observation observation2 = new Observation();
+		observation2.addIdentifier().setSystem("http://observation").setValue("222");
+		observation2.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation2).conditional("Observation?identifier=http://observation|222");
+
+		Observation observation3 = new Observation();
+		observation3.addIdentifier().setSystem("http://observation").setValue("333");
+		observation3.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation3).conditional("Observation?identifier=http://observation|333");
+
+		Bundle input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		Bundle output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+
+		// One select to resolve the 3 match URLs
+		assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		String firstSelectQuery = myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false);
+		assertEquals(1, countMatches(firstSelectQuery, "HASH_SYS_AND_VALUE in (? , ? , ? , ?)"), firstSelectQuery);
+		assertEquals(23, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+		assertEquals(3, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(4, output.getEntry().size());
+
+		IdType patientId = new IdType(output.getEntry().get(1).getResponse().getLocation());
+		IdType observationId = new IdType(output.getEntry().get(2).getResponse().getLocation());
+		Observation actualObs = myObservationDao.read(observationId, mySrd);
+		assertEquals(patientId.toUnqualifiedVersionless().getValue(), actualObs.getSubject().getReference());
+
+		myCaptureQueriesListener.logInsertQueries();
+		runInTransaction(() -> {
+			assertEquals(4, myResourceTableDao.count());
+			assertEquals(4, myResourceHistoryTableDao.count());
+			assertEquals(6, myResourceLinkDao.count());
+			assertEquals(5, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+		/*
+		 * Repeat, and make sure we reuse
+		 */
+
+		bb = new BundleBuilder(myFhirContext);
+
+		observation1 = new Observation();
+		observation1.addIdentifier().setSystem("http://observation").setValue("111");
+		observation1.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation1).conditional("Observation?identifier=http://observation|111");
+
+		patient = new Patient();
+		patient.addIdentifier().setSystem("http://patient").setValue("123");
+		bb.addTransactionUpdateEntry(patient).conditional("Patient?identifier=http://patient|123");
+
+		observation2 = new Observation();
+		observation2.addIdentifier().setSystem("http://observation").setValue("222");
+		observation2.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation2).conditional("Observation?identifier=http://observation|222");
+
+		observation3 = new Observation();
+		observation3.addIdentifier().setSystem("http://observation").setValue("333");
+		observation3.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation3).conditional("Observation?identifier=http://observation|333");
+
+		input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+
+		assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(4, output.getEntry().size());
+
+		patientId = new IdType(output.getEntry().get(1).getResponse().getLocation());
+		observationId = new IdType(output.getEntry().get(2).getResponse().getLocation());
+		actualObs = myObservationDao.read(observationId, mySrd);
+		assertEquals(patientId.toUnqualifiedVersionless().getValue(), actualObs.getSubject().getReference());
+
+		myCaptureQueriesListener.logInsertQueries();
+		runInTransaction(() -> {
+			assertEquals(4, myResourceTableDao.count());
+			assertEquals(4, myResourceHistoryTableDao.count());
+			assertEquals(6, myResourceLinkDao.count());
+			assertEquals(5, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+
+		/*
+		 * Repeat once more
+		 */
+
+		bb = new BundleBuilder(myFhirContext);
+
+		observation1 = new Observation();
+		observation1.addIdentifier().setSystem("http://observation").setValue("111");
+		observation1.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation1).conditional("Observation?identifier=http://observation|111");
+
+		patient = new Patient();
+		patient.addIdentifier().setSystem("http://patient").setValue("123");
+		bb.addTransactionUpdateEntry(patient).conditional("Patient?identifier=http://patient|123");
+
+		observation2 = new Observation();
+		observation2.addIdentifier().setSystem("http://observation").setValue("222");
+		observation2.setSubject(new Reference("Patient?identifier=http://patient|123"));
+		bb.addTransactionUpdateEntry(observation2).conditional("Observation?identifier=http://observation|222");
+
+		input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+
+		assertEquals(theMatchUrlCacheEnabled ? 4 : 5, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(3, output.getEntry().size());
+
+		patientId = new IdType(output.getEntry().get(1).getResponse().getLocation());
+		observationId = new IdType(output.getEntry().get(2).getResponse().getLocation());
+		actualObs = myObservationDao.read(observationId, mySrd);
+		assertEquals(patientId.toUnqualifiedVersionless().getValue(), actualObs.getSubject().getReference());
+
+		myCaptureQueriesListener.logInsertQueries();
+		runInTransaction(() -> {
+			assertEquals(4, myResourceTableDao.count());
+			assertEquals(4, myResourceHistoryTableDao.count());
+			assertEquals(6, myResourceLinkDao.count());
+			assertEquals(5, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+	}
+
+
+	@ParameterizedTest(name = "{index}: {0}")
+	@CsvSource({
+		"Pre-existing with cache, true ,true",
+		"Pre-existing without cache, true ,false",
+		"No match with cache, false ,true",
+		"No match without cache, false ,false",
+	})
+	public void testRepeatedInlineMatchUrls(@SuppressWarnings("unused") String theName, boolean theTargetAlreadyExists, boolean theMatchUrlCacheEnabled) {
+		myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
+		myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
+		myStorageSettings.setMatchUrlCacheEnabled(theMatchUrlCacheEnabled);
+		myStorageSettings.setDeleteEnabled(false);
+
+		if (theTargetAlreadyExists) {
+			Patient patient = new Patient();
+			patient.addIdentifier().setSystem("http://patient").setValue("123");
+			myPatientDao.create(patient, mySrd);
+		}
+
+		BundleBuilder bb = new BundleBuilder(myFhirContext);
+
+		for (int i = 0; i < 4; i++) {
+			Observation observation1 = new Observation();
+			observation1.addIdentifier().setSystem("http://observation").setValue(Integer.toString(i));
+			observation1.setSubject(new Reference("Patient?identifier=http://patient|123"));
+			bb.addTransactionCreateEntry(observation1);
+		}
+		Bundle input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		Bundle output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+		myCaptureQueriesListener.logSelectQueries();
+		assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(theTargetAlreadyExists ? 20 : 24, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+		assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(4, output.getEntry().size());
+
+		runInTransaction(() -> {
+			assertEquals(5, myResourceTableDao.count());
+			assertEquals(5, myResourceHistoryTableDao.count());
+			assertEquals(8, myResourceLinkDao.count());
+			assertEquals(6, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+		/*
+		 * Second pass
+		 */
+
+		bb = new BundleBuilder(myFhirContext);
+
+		for (int i = 0; i < 4; i++) {
+			Observation observation1 = new Observation();
+			observation1.addIdentifier().setSystem("http://observation").setValue(Integer.toString(i));
+			observation1.setSubject(new Reference("Patient?identifier=http://patient|123"));
+			bb.addTransactionCreateEntry(observation1);
+		}
+		input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+		myCaptureQueriesListener.logSelectQueries();
+		assertEquals(theMatchUrlCacheEnabled ? 0 : 1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+		assertEquals(20, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+		assertEquals(4, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(4, output.getEntry().size());
+
+		runInTransaction(() -> {
+			assertEquals(9, myResourceTableDao.count());
+			assertEquals(9, myResourceHistoryTableDao.count());
+			assertEquals(16, myResourceLinkDao.count());
+			assertEquals(10, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+	}
+
+
+	@ParameterizedTest(name = "{index}: {0}")
+	@CsvSource({
+		"NC Pre-existing with cache, true ,true, false",
+		"NC Pre-existing without cache, true ,false, false",
+		"NC No match with cache, false ,true, false",
+		"NC No match without cache, false ,false, false",
+		"C Pre-existing with cache, true ,true, true",
+		"C Pre-existing without cache, true ,false, true",
+		"C No match with cache, false ,true, true",
+		"C No match without cache, false ,false, true",
+	})
+	public void testComplexConditionalUpdate(String theName, boolean theTargetAlreadyExists, boolean theMatchUrlCacheEnabled, boolean theResourceChanges) {
+		myStorageSettings.setIndexMissingFields(JpaStorageSettings.IndexEnabledEnum.DISABLED);
+		myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
+		myStorageSettings.setMatchUrlCacheEnabled(theMatchUrlCacheEnabled);
+		myStorageSettings.setDeleteEnabled(false);
+
+		Patient patient = new Patient();
+		patient.setId("Patient/P");
+		myPatientDao.update(patient, mySrd);
+
+		if (theTargetAlreadyExists) {
+			Observation observation1 = new Observation();
+			observation1.setValue(new Quantity(5L));
+			observation1.setSubject(new Reference("Patient/P"));
+			myObservationDao.create(observation1, mySrd);
+		}
+
+		BundleBuilder bb = new BundleBuilder(myFhirContext);
+		Observation observation1 = new Observation();
+		if (theResourceChanges) {
+			observation1.addNote().setText(UUID.randomUUID().toString());
+		}
+		observation1.setValue(new Quantity(5L));
+		observation1.setSubject(new Reference("Patient/P"));
+		bb.addTransactionUpdateEntry(observation1).conditional("Observation?subject=Patient/P&value-quantity=5");
+		Bundle input = bb.getBundleTyped();
+
+		// Test
+		myCaptureQueriesListener.clear();
+		Bundle output = mySystemDao.transaction(mySrd, input);
+
+		// Verify
+		myCaptureQueriesListener.logSelectQueries();
+		if (theTargetAlreadyExists) {
+			if (theResourceChanges) {
+				assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+				assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+				assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+			} else {
+				assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+				assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+				assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+			}
+		} else {
+			if (theResourceChanges) {
+				assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+				assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+				assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+			} else {
+				assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+				assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
+				assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
+			}
+		}
+		assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
+		assertEquals(1, myCaptureQueriesListener.countCommits());
+		assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+		assertEquals(1, output.getEntry().size());
+
+		runInTransaction(() -> {
+			assertEquals(2, myResourceTableDao.count());
+			assertEquals((theTargetAlreadyExists && theResourceChanges) ? 3 : 2, myResourceHistoryTableDao.count());
+			assertEquals(2, myResourceLinkDao.count());
+			assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
+			assertEquals(0, myResourceIndexedSearchParamStringDao.count());
+		});
+
+	}
+
+}
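If the BundleBuilder calls used throughout this new test class are unfamiliar, the sketch below shows the basic shape being exercised in isolation: a conditional update of a Patient plus an Observation whose subject is the matching inline match URL, so both entries resolve to the same target within one transaction. It assumes the caller supplies an R5 FhirContext; the identifier values mirror the illustrative ones in the test.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Reference;

public class ConditionalUpdateBundleSketch {

	// theContext is expected to be an R5 FhirContext (the test class above uses
	// the one provided by its base class).
	static Bundle build(FhirContext theContext) {
		BundleBuilder bb = new BundleBuilder(theContext);

		// Conditional update: create-or-update the Patient matched by this identifier
		Patient patient = new Patient();
		patient.addIdentifier().setSystem("http://patient").setValue("123");
		bb.addTransactionUpdateEntry(patient).conditional("Patient?identifier=http://patient|123");

		// The Observation's subject uses the same search expression as an inline match URL
		Observation observation = new Observation();
		observation.addIdentifier().setSystem("http://observation").setValue("111");
		observation.setSubject(new Reference("Patient?identifier=http://patient|123"));
		bb.addTransactionUpdateEntry(observation).conditional("Observation?identifier=http://observation|111");

		return bb.getBundleTyped();
	}
}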
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.jpa.embedded;
 
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.jpa.embedded;
 
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.jpa.embedded;
 
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.jpa.embedded;
 
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server Test Utilities
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.jpa.embedded;
 
 import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
@@ -19,6 +19,7 @@
  */
 package ca.uhn.fhir.rest.api.server.storage;
 
+import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.HookParams;
 import ca.uhn.fhir.interceptor.api.Pointcut;
@@ -209,14 +210,14 @@ public class TransactionDetails {
 	 * "<code>Observation/123</code>") and a storage ID for that resource. Resources should only be placed within
 	 * the TransactionDetails if they are known to exist and be valid targets for other resources to link to.
 	 */
-	public void addResolvedMatchUrl(String theConditionalUrl, @Nonnull IResourcePersistentId thePersistentId) {
+	public void addResolvedMatchUrl(FhirContext theFhirContext, String theConditionalUrl, @Nonnull IResourcePersistentId thePersistentId) {
 		Validate.notBlank(theConditionalUrl);
 		Validate.notNull(thePersistentId);
 
 		if (myResolvedMatchUrls.isEmpty()) {
 			myResolvedMatchUrls = new HashMap<>();
 		} else if (matchUrlWithDiffIdExists(theConditionalUrl, thePersistentId)) {
-			String msg = "Invalid match URL " + theConditionalUrl + " - Multiple resources match this search";
+			String msg = theFhirContext.getLocalizer().getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", theConditionalUrl);
 			throw new PreconditionFailedException(Msg.code(2207) + msg);
 		}
 		myResolvedMatchUrls.put(theConditionalUrl, thePersistentId);
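A hedged usage sketch of the new three-argument signature follows. It assumes the caller already holds a TransactionDetails instance and a resolved persistent ID, as MatchResourceUrlService does elsewhere in this commit; the import paths are the ones used in the surrounding code, and may differ in other HAPI versions.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;

public class ResolvedMatchUrlSketch {

	// Pre-seeds a conditional URL that is already known to resolve to the given
	// storage ID so later conditional operations in the same transaction can skip
	// the lookup SELECT. If the same URL was previously resolved to a different
	// ID, the method throws PreconditionFailedException with Msg.code(2207).
	static void preSeed(FhirContext theContext,
							  TransactionDetails theTransactionDetails,
							  String theConditionalUrl,
							  IResourcePersistentId theResolvedId) {
		theTransactionDetails.addResolvedMatchUrl(theContext, theConditionalUrl, theResolvedId);
	}
}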
@@ -1,3 +1,22 @@
+/*-
+ * #%L
+ * HAPI FHIR JPA Server - Batch2 Task Processor
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package ca.uhn.fhir.batch2.model;
 
 import ca.uhn.fhir.batch2.coordinator.BatchWorkChunk;
@@ -256,7 +256,11 @@ public abstract class BaseStorageDao {
 
 	protected DaoMethodOutcome toMethodOutcome(RequestDetails theRequest, @Nonnull final IBasePersistedResource theEntity, @Nonnull IBaseResource theResource, @Nullable String theMatchUrl, @Nonnull RestOperationTypeEnum theOperationType) {
 		DaoMethodOutcome outcome = new DaoMethodOutcome();
-		outcome.setPersistentId(theEntity.getPersistentId());
+
+		IResourcePersistentId persistentId = theEntity.getPersistentId();
+		persistentId.setAssociatedResourceId(theResource.getIdElement());
+
+		outcome.setPersistentId(persistentId);
 		outcome.setMatchUrl(theMatchUrl);
 		outcome.setOperationType(theOperationType);
 
@@ -152,7 +152,7 @@ public class MatchResourceUrlService<T extends IResourcePersistentId> {
 
 		if (retVal.size() == 1) {
 			T pid = retVal.iterator().next();
-			theTransactionDetails.addResolvedMatchUrl(matchUrl, pid);
+			theTransactionDetails.addResolvedMatchUrl(myContext, matchUrl, pid);
 			if (myStorageSettings.isMatchUrlCacheEnabled()) {
 				myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, pid);
 			}
@@ -216,7 +216,7 @@ public class MatchResourceUrlService<T extends IResourcePersistentId> {
 		Validate.notBlank(theMatchUrl);
 		Validate.notNull(theResourcePersistentId);
 		String matchUrl = massageForStorage(theResourceType, theMatchUrl);
-		theTransactionDetails.addResolvedMatchUrl(matchUrl, theResourcePersistentId);
+		theTransactionDetails.addResolvedMatchUrl(myContext, matchUrl, theResourcePersistentId);
 		if (myStorageSettings.isMatchUrlCacheEnabled()) {
 			myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, matchUrl, theResourcePersistentId);
 		}
@@ -100,25 +100,30 @@ public abstract class BaseSearchParamWithInlineReferencesExtractor<T extends IRe
 		}
 
 		T match;
+		IIdType newId = null;
 		if (matches.isEmpty()) {
 			Optional<IBasePersistedResource> placeholderOpt = myDaoResourceLinkResolver.createPlaceholderTargetIfConfiguredToDoSo(matchResourceType, nextRef, null, theRequestDetails, theTransactionDetails);
 			if (placeholderOpt.isPresent()) {
 				match = (T) placeholderOpt.get().getPersistentId();
-				match.setAssociatedResourceId(placeholderOpt.get().getIdDt());
-				theTransactionDetails.addResolvedMatchUrl(nextIdText, match);
+				newId = myFhirContext.getVersion().newIdType();
+				newId.setValue(placeholderOpt.get().getIdDt().getValue());
+				match.setAssociatedResourceId(newId);
+				theTransactionDetails.addResolvedMatchUrl(myFhirContext, nextIdText, match);
 				myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.MATCH_URL, nextIdText, match);
 			} else {
 				String msg = myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidMatchUrlNoMatches", nextId.getValue());
 				throw new ResourceNotFoundException(Msg.code(1091) + msg);
 			}
 		} else if (matches.size() > 1) {
-			String msg = myFhirContext.getLocalizer().getMessage(BaseStorageDao.class, "invalidMatchUrlMultipleMatches", nextId.getValue());
+			String msg = myFhirContext.getLocalizer().getMessage(TransactionDetails.class, "invalidMatchUrlMultipleMatches", nextId.getValue());
 			throw new PreconditionFailedException(Msg.code(1092) + msg);
 		} else {
 			match = matches.iterator().next();
 		}
 
-		IIdType newId = myIdHelperService.translatePidIdToForcedId(myFhirContext, resourceTypeString, match);
+		if (newId == null) {
+			newId = myIdHelperService.translatePidIdToForcedId(myFhirContext, resourceTypeString, match);
+		}
 		ourLog.debug("Replacing inline match URL[{}] with ID[{}}", nextId.getValue(), newId);
 
 		if (theTransactionDetails != null) {
@@ -1,18 +1,26 @@
 package ca.uhn.fhir.util;
 
+import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.rest.api.Constants;
 import org.apache.http.message.BasicNameValuePair;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.junit.jupiter.api.Assertions.assertAll;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.when;
 
 public class UrlUtilTest {
 
+	private final FhirContext myCtx = FhirContext.forR4Cached();
+
 	@Test
 	public void testNormalizeCanonicalUrl() {
 		assertEquals("http://foo", UrlUtil.normalizeCanonicalUrlForComparison("http://foo/"));
@@ -122,4 +130,37 @@ public class UrlUtilTest {
 			containsInAnyOrder(new BasicNameValuePair("names", "homer|simpson")));
 	}
 
+	@ParameterizedTest
+	@CsvSource({
+		"null, null",
+		"null, urn:uuid:12345",
+		"Patient, Patient",
+		"Patient, Patient?",
+		"Patient, Patient?identifier=foo",
+		"Patient, /Patient",
+		"Patient, /Patient?",
+		"Patient, /Patient?identifier=foo",
+		"Patient, http://foo/base/Patient?identifier=foo",
+		"Patient, http://foo/base/Patient/1",
+		"Patient, http://foo/base/Patient/1/_history/2",
+		"Patient, /Patient/1",
+		"Patient, /Patient/1/_history/2",
+		"Patient, Patient/1",
+		"Patient, Patient/1/_history/2",
+	})
+	public void testDetermineResourceTypeInResourceUrl(String theExpected, String theUrl) {
+		String url = theUrl;
+		if (url.equals("null")) {
+			url = null;
+		}
+
+		String actual = UrlUtil.determineResourceTypeInResourceUrl(myCtx, url);
+
+		if (theExpected.equals("null")) {
+			assertNull(actual);
+		} else {
+			assertEquals(theExpected, actual);
+		}
+	}
+
 }
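As a quick usage note on the helper this test exercises, here is a minimal sketch; the URLs are the same illustrative shapes from the CSV table above, and a urn-style input yields null.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.util.UrlUtil;

public class DetermineResourceTypeSketch {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4Cached();

		// Each of these prints "Patient"
		System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "Patient?identifier=foo"));
		System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "http://foo/base/Patient/1/_history/2"));

		// urn-style placeholders cannot be mapped to a resource type, so this prints "null"
		System.out.println(UrlUtil.determineResourceTypeInResourceUrl(ctx, "urn:uuid:12345"));
	}
}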