Caching cleanup (#6522)
* Tests passing
* Cleanup
* Cleanup
* Test fixes
* Test fix
* Cleanup
* Update hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java Co-authored-by: Ken Stevens <khstevens@gmail.com>
* Update hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java Co-authored-by: Ken Stevens <khstevens@gmail.com>
* Update hapi-fhir-validation/src/main/java/org/hl7/fhir/common/hapi/validation/support/ValidationSupportChain.java Co-authored-by: Ken Stevens <khstevens@gmail.com>
* Account for review comments
* Spotless
* Compile fix
* Test fixes
* Test cleanup
* Test cleanup
* Test fixes
* Resolve fixme
* Test fix
* Test fixes
* Test fixes
* Test fixes
* Fix
* Test fix
* Test fixes
* Test fixes
* Cache cleanup
* Work on caches
* Add changelog
* Compile fix
* Spotless
* Test fix
* Fix
* Cache cleanup
* Trying to figure out why Lucene tests are failing
* Test fix
* More cleanup
* Test fix
* Test fix
* Address some review comments
* HAPI FHIR version bump
* Test fixes
* Cleanup
* Add assert
* Add check
* Test fixes
* Bulk export fix
* Test fixes
* Add some test logging
* Cleanup
* Cleanup
* Test cleanup
* Add test logging
* Attempted test fix
* Add testing method
* Fix build
* Attempted test fix
* Try to fix tests
* Restore fixme
* Spotless
* Test fix

---------

Co-authored-by: Ken Stevens <khstevens@gmail.com>
This commit is contained in:
parent d145e2a4bb
commit ea1d179b9a
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -150,9 +150,15 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor
 @Override
 public List<IBaseResource> fetchAllConformanceResources() {
 ArrayList<IBaseResource> retVal = new ArrayList<>();
-retVal.addAll(myCodeSystems.values());
-retVal.addAll(myStructureDefinitions.values());
-retVal.addAll(myValueSets.values());
+if (myCodeSystems != null) {
+retVal.addAll(myCodeSystems.values());
+}
+if (myStructureDefinitionResources != null) {
+retVal.addAll(myStructureDefinitions.values());
+}
+if (myValueSets != null) {
+retVal.addAll(myValueSets.values());
+}
 return retVal;
 }
 
@@ -30,7 +30,7 @@ public class ValidationSupportContext {
 private final Set<String> myCurrentlyGeneratingSnapshots = new HashSet<>();
 
 public ValidationSupportContext(IValidationSupport theRootValidationSupport) {
-Validate.notNull(theRootValidationSupport, "theRootValidationSupport musty not be null");
+Validate.notNull(theRootValidationSupport, "theRootValidationSupport must not be null");
 myRootValidationSupport = theRootValidationSupport;
 }
 
@@ -74,6 +74,17 @@ public interface IFhirVersion {
 return retVal;
 }
 
+/**
+* Creates a new {@link IIdType} instance for the given version with the given value
+*
+* @since 8.0.0
+*/
+default IIdType newIdType(String theResourceType, String theIdPart) {
+IIdType retVal = newIdType();
+retVal.setParts(null, theResourceType, theIdPart, null);
+return retVal;
+}
+
 /**
 * Returns an instance of <code>IFhirVersionServer<code> for this version.
 * Note that this method may only be called if the <code>hapi-fhir-server</code>
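For context, the `newIdType(String, String)` default method added in the hunk above is what later hunks in this commit use to build typed IDs such as `CodeSystem/loinc`. A minimal usage sketch, assuming a plain `FhirContext.forR4()` setup (the setup is illustrative and not part of the commit; the method itself is only available from the release carrying this change, per the `@since 8.0.0` tag):

```java
import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.instance.model.api.IIdType;

public class NewIdTypeExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		// Builds a typed ID from a resource type and an ID part, e.g. "CodeSystem/loinc",
		// using the default method added to IFhirVersion in this commit.
		IIdType id = ctx.getVersion().newIdType("CodeSystem", "loinc");

		System.out.println(id.getValue()); // prints: CodeSystem/loinc
	}
}
```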
@@ -4,7 +4,7 @@
 <modelVersion>4.0.0</modelVersion>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-bom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <packaging>pom</packaging>
 <name>HAPI FHIR BOM</name>

@@ -12,7 +12,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -6,7 +6,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir-cli</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-fhir</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -0,0 +1,7 @@
+---
+type: perf
+issue: 6522
+title: "Several memory caches in various parts of the JPA server have been removed in an
+  effort to consolidate caching in this system to two places: The MemoryCacheService, and
+  ValidationSupportChain. This should make management of the system easier."
+
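The changelog entry above names ValidationSupportChain as one of the two remaining cache owners. For orientation, the sketch below shows common HAPI FHIR wiring of a validation chain. It is illustrative only: class and package names follow recent HAPI FHIR releases, the exact caching behaviour of ValidationSupportChain depends on the version in use, and none of this code is part of the commit itself.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.DefaultProfileValidationSupport;
import ca.uhn.fhir.validation.FhirValidator;
import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.common.hapi.validation.support.ValidationSupportChain;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;

public class ValidationChainExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();

		// The chain consults each support module in order; per the changelog, the chain
		// itself is now intended to be the place where validation lookups are cached.
		ValidationSupportChain chain = new ValidationSupportChain(
				new DefaultProfileValidationSupport(ctx),
				new InMemoryTerminologyServerValidationSupport(ctx),
				new CommonCodeSystemsTerminologyService(ctx));

		FhirValidator validator = ctx.newValidator();
		validator.registerValidatorModule(new FhirInstanceValidator(chain));
	}
}
```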
@@ -11,7 +11,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -4,7 +4,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>

@@ -5,7 +5,7 @@
 <parent>
 <groupId>ca.uhn.hapi.fhir</groupId>
 <artifactId>hapi-deployable-pom</artifactId>
-<version>7.7.8-SNAPSHOT</version>
+<version>7.7.9-SNAPSHOT</version>
 
 <relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 </parent>
@@ -23,7 +23,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
+import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.rest.api.server.SystemRequestDetails;

@@ -37,6 +39,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 
 import static org.slf4j.LoggerFactory.getLogger;
 
@@ -90,20 +93,9 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
 public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(
 RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
 ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
-HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
-
-for (IIdType id : theIds) {
-String resourceType = id.getResourceType();
-if (!resourceTypeToIds.containsKey(resourceType)) {
-resourceTypeToIds.put(resourceType, new ArrayList<>());
-}
-resourceTypeToIds.get(resourceType).add(id);
-}
-
-for (List<IIdType> nextIds : resourceTypeToIds.values()) {
-ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, nextIds);
-idToPID.putAll(idAndPID);
-}
+ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId, theIds);
+idToPID.putAll(idAndPID);
 
 return idToPID;
 }
@@ -124,14 +116,17 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
 return retval;
 }
 
-List<JpaPid> jpaPids =
-myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, new ArrayList<>(theIds));
+Map<IIdType, IResourceLookup<JpaPid>> identities = myIdHelperService.resolveResourceIdentities(
+thePartitionId,
+new ArrayList<>(theIds),
+ResolveIdentityMode.includeDeleted().cacheOk());
 
 // we'll use this map to fetch pids that require versions
 HashMap<Long, JpaPid> pidsToVersionToResourcePid = new HashMap<>();
 
 // fill in our map
-for (JpaPid pid : jpaPids) {
+for (IResourceLookup<JpaPid> identity : identities.values()) {
+JpaPid pid = identity.getPersistentId();
 if (pid.getVersion() == null) {
 pidsToVersionToResourcePid.put(pid.getId(), pid);
 }
@@ -883,7 +883,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 }
 }
 if (!StringUtils.isBlank(entity.getResourceType())) {
-validateIncomingResourceTypeMatchesExisting(theResource, entity);
+String resourceType = myContext.getResourceType(theResource);
+// This is just a sanity check and should never actually fail.
+// We resolve the ID using IdLookupService, and there should be
+// no way to get it to give you a mismatched type for an ID.
+Validate.isTrue(resourceType.equals(entity.getResourceType()));
 }
 }
 
@@ -1215,7 +1219,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 } else {
 historyEntity = (ResourceHistoryTable) theHistoryEntity;
 if (!StringUtils.isBlank(historyEntity.getResourceType())) {
-validateIncomingResourceTypeMatchesExisting(theResource, historyEntity);
+String resourceType = myContext.getResourceType(theResource);
+if (!resourceType.equals(historyEntity.getResourceType())) {
+throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID["
++ historyEntity.getIdDt().toUnqualifiedVersionless() + "] is of type["
++ historyEntity.getResourceType()
++ "] - Cannot update with [" + resourceType + "]");
+}
 }
 
 historyEntity.setDeleted(null);
@@ -1408,15 +1418,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 return theRequest != null ? theRequest.getRequestId() : null;
 }
 
-private void validateIncomingResourceTypeMatchesExisting(IBaseResource theResource, BaseHasResource entity) {
-String resourceType = myContext.getResourceType(theResource);
-if (!resourceType.equals(entity.getResourceType())) {
-throw new UnprocessableEntityException(Msg.code(930) + "Existing resource ID["
-+ entity.getIdDt().toUnqualifiedVersionless() + "] is of type[" + entity.getResourceType()
-+ "] - Cannot update with [" + resourceType + "]");
-}
-}
-
 @Override
 public DaoMethodOutcome updateInternal(
 RequestDetails theRequestDetails,
@@ -573,7 +573,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 if (fhirId == null) {
 fhirId = Long.toString(entity.getId());
 }
-myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
+myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
 theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
 theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);
 
@@ -723,9 +724,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 validateIdPresentForDelete(theId);
 validateDeleteEnabled();
 
+RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
+theRequestDetails, getResourceName(), theId);
+
 final ResourceTable entity;
 try {
-entity = readEntityLatestVersion(theId, theRequestDetails, theTransactionDetails);
+entity = readEntityLatestVersion(theId, requestPartitionId, theTransactionDetails);
 } catch (ResourceNotFoundException ex) {
 // we don't want to throw 404s.
 // if not found, return an outcome anyways.
@@ -803,6 +807,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 .getMessageSanitized(BaseStorageDao.class, "successfulTimingSuffix", w.getMillis());
 outcome.setOperationOutcome(createInfoOperationOutcome(msg, StorageResponseCodeEnum.SUCCESSFUL_DELETE));
 
+myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+entity.getPersistentId(),
+requestPartitionId,
+entity.getResourceType(),
+entity.getFhirId(),
+entity.getDeleted());
+
 return outcome;
 }
 
@@ -1144,15 +1155,15 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }
 
 @Override
-@Transactional(propagation = Propagation.NEVER)
 public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions, RequestDetails theRequest) {
+HapiTransactionService.noTransactionAllowed();
 validateExpungeEnabled();
 return forceExpungeInExistingTransaction(theId, theExpungeOptions, theRequest);
 }
 
 @Override
-@Transactional(propagation = Propagation.NEVER)
 public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions, RequestDetails theRequestDetails) {
+HapiTransactionService.noTransactionAllowed();
 ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
 validateExpungeEnabled();
 return myExpungeService.expunge(getResourceName(), null, theExpungeOptions, theRequestDetails);
@@ -1873,47 +1884,37 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 IIdType theId,
 @Nonnull RequestPartitionId theRequestPartitionId,
 TransactionDetails theTransactionDetails) {
-validateResourceTypeAndThrowInvalidRequestException(theId);
+HapiTransactionService.requireTransaction();
 
+IIdType id = theId;
+validateResourceTypeAndThrowInvalidRequestException(id);
+if (!id.hasResourceType()) {
+id = id.withResourceType(getResourceName());
+}
+
 JpaPid persistentId = null;
 if (theTransactionDetails != null) {
-if (theTransactionDetails.isResolvedResourceIdEmpty(theId.toUnqualifiedVersionless())) {
-throw new ResourceNotFoundException(Msg.code(1997) + theId);
+if (theTransactionDetails.isResolvedResourceIdEmpty(id.toUnqualifiedVersionless())) {
+throw new ResourceNotFoundException(Msg.code(1997) + id);
 }
 if (theTransactionDetails.hasResolvedResourceIds()) {
-persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(theId);
+persistentId = (JpaPid) theTransactionDetails.getResolvedResourceId(id);
 }
 }
 
 if (persistentId == null) {
-String resourceName = getResourceName();
-if (myStorageSettings.getResourceClientIdStrategy()
-== JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
-if (theId.isIdPartValidLong()) {
-/*
-* If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number
-* corresponds to a DB PID. In this case we want to resolve it regardless of
-* which type the client has supplied. This is because DB PIDs are unique across
-* all resource types (unlike FHIR_IDs which are namespaced to the resource type).
-* We want to load the resource with that PID regardless of type because if
-* the user is trying to update it we want to fail if the type is wrong, as
-* opposed to trying to create a new instance.
-*/
-resourceName = null;
-}
-}
 persistentId = myIdHelperService.resolveResourceIdentityPid(
 theRequestPartitionId,
-resourceName,
-theId.getIdPart(),
+id.getResourceType(),
+id.getIdPart(),
 ResolveIdentityMode.includeDeleted().cacheOk());
 }
 
 ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());
 if (entity == null) {
-throw new ResourceNotFoundException(Msg.code(1998) + theId);
+throw new ResourceNotFoundException(Msg.code(1998) + id);
 }
-validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
+validateGivenIdIsAppropriateToRetrieveResource(id, entity);
 return entity;
 }
 
@@ -2427,6 +2428,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 assert resourceId != null;
 assert resourceId.hasIdPart();
 
+if (!resourceId.hasResourceType()) {
+resourceId = resourceId.withResourceType(getResourceName());
+}
+
 boolean create = false;
 
 if (theRequest != null) {
@@ -2494,6 +2499,18 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 entity.setSearchUrlPresent(false);
 }
 
+if (entity.isDeleted()) {
+// We're un-deleting this entity so let's inform the memory cache service
+myIdHelperService.addResolvedPidToFhirIdAfterCommit(
+entity.getPersistentId(),
+entity.getPartitionId() == null
+? RequestPartitionId.defaultPartition()
+: entity.getPartitionId().toPartitionId(),
+entity.getResourceType(),
+entity.getFhirId(),
+null);
+}
+
 return super.doUpdateForUpdateOrPatch(
 theRequest,
 theResourceId,
@@ -2571,7 +2588,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }
 
 @Override
-@Transactional(propagation = Propagation.SUPPORTS)
 public MethodOutcome validate(
 T theResource,
 IIdType theId,
@@ -2587,19 +2603,30 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 throw new InvalidRequestException(
 Msg.code(991) + "No ID supplied. ID is required when validating with mode=DELETE");
 }
-final ResourceTable entity = readEntityLatestVersion(theId, theRequest, transactionDetails);
 
-// Validate that there are no resources pointing to the candidate that
-// would prevent deletion
-DeleteConflictList deleteConflicts = new DeleteConflictList();
-if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
-myDeleteConflictService.validateOkToDelete(
-deleteConflicts, entity, true, theRequest, new TransactionDetails());
-}
-DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+RequestPartitionId requestPartitionId =
+myRequestPartitionHelperService.determineReadPartitionForRequestForRead(
+theRequest, getResourceName(), theId);
 
-IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
-return new MethodOutcome(new IdDt(theId.getValue()), oo);
+return myTransactionService
+.withRequest(theRequest)
+.withRequestPartitionId(requestPartitionId)
+.execute(() -> {
+final ResourceTable entity =
+readEntityLatestVersion(theId, requestPartitionId, transactionDetails);
+
+// Validate that there are no resources pointing to the candidate that
+// would prevent deletion
+DeleteConflictList deleteConflicts = new DeleteConflictList();
+if (getStorageSettings().isEnforceReferentialIntegrityOnDelete()) {
+myDeleteConflictService.validateOkToDelete(
+deleteConflicts, entity, true, theRequest, new TransactionDetails());
+}
+DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(getContext(), deleteConflicts);
+
+IBaseOperationOutcome oo = createInfoOperationOutcome("Ok to delete");
+return new MethodOutcome(new IdDt(theId.getValue()), oo);
+});
 }
 
 FhirValidator validator = getContext().newValidator();
@@ -26,8 +26,6 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.term.TermReadSvcUtil;
-import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
-import ca.uhn.fhir.model.primitive.IdDt;
 import ca.uhn.fhir.rest.api.SortOrderEnum;
 import ca.uhn.fhir.rest.api.SortSpec;
 import ca.uhn.fhir.rest.api.server.IBundleProvider;
@@ -35,76 +33,57 @@ import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
 import ca.uhn.fhir.rest.param.StringParam;
 import ca.uhn.fhir.rest.param.TokenParam;
 import ca.uhn.fhir.rest.param.UriParam;
-import ca.uhn.fhir.sl.cache.Cache;
-import ca.uhn.fhir.sl.cache.CacheFactory;
 import jakarta.annotation.Nullable;
 import jakarta.annotation.PostConstruct;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
 import org.hl7.fhir.r4.model.CodeSystem;
 import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.ImplementationGuide;
 import org.hl7.fhir.r4.model.Questionnaire;
 import org.hl7.fhir.r4.model.StructureDefinition;
-import org.hl7.fhir.r4.model.UriType;
 import org.hl7.fhir.r4.model.ValueSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.transaction.annotation.Propagation;
 import org.springframework.transaction.annotation.Transactional;
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.Objects;
 import java.util.Optional;
-import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 
 import static org.apache.commons.lang3.StringUtils.isBlank;
 import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
+import static org.hl7.fhir.instance.model.api.IAnyResource.SP_RES_LAST_UPDATED;
 
 /**
 * This class is a {@link IValidationSupport Validation support} module that loads
 * validation resources (StructureDefinition, ValueSet, CodeSystem, etc.) from the resources
 * persisted in the JPA server.
 */
 @Transactional(propagation = Propagation.REQUIRED)
 public class JpaPersistedResourceValidationSupport implements IValidationSupport {
 
 private static final Logger ourLog = LoggerFactory.getLogger(JpaPersistedResourceValidationSupport.class);
 
 private final FhirContext myFhirContext;
-private final IBaseResource myNoMatch;
 
 @Autowired
 private DaoRegistry myDaoRegistry;
 
-@Autowired
-private ITermReadSvc myTermReadSvc;
-
 private Class<? extends IBaseResource> myCodeSystemType;
 private Class<? extends IBaseResource> myStructureDefinitionType;
 private Class<? extends IBaseResource> myValueSetType;
 
-// TODO: JA2 We shouldn't need to cache here, but we probably still should since the
-// TermReadSvcImpl calls these methods as a part of its "isCodeSystemSupported" calls.
-// We should modify CachingValidationSupport to cache the results of "isXXXSupported"
-// at which point we could do away with this cache
-// TODO: LD: This cache seems to supersede the cache in CachingValidationSupport, as that cache is set to
-// 10 minutes, but this 1 minute cache now determines the expiry.
-// This new behaviour was introduced between the 7.0.0 release and the current master (7.2.0)
-private Cache<String, IBaseResource> myLoadCache = CacheFactory.build(TimeUnit.MINUTES.toMillis(1), 1000);
-
 /**
 * Constructor
 */
 public JpaPersistedResourceValidationSupport(FhirContext theFhirContext) {
 super();
-Validate.notNull(theFhirContext);
+Validate.notNull(theFhirContext, "theFhirContext must not be null");
 myFhirContext = theFhirContext;
-
-myNoMatch = myFhirContext.getResourceDefinition("Basic").newInstance();
 }
 
 @Override
@@ -115,51 +94,44 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 @Override
 public IBaseResource fetchCodeSystem(String theSystem) {
 if (TermReadSvcUtil.isLoincUnversionedCodeSystem(theSystem)) {
-Optional<IBaseResource> currentCSOpt = getCodeSystemCurrentVersion(new UriType(theSystem));
-if (!currentCSOpt.isPresent()) {
-ourLog.info("Couldn't find current version of CodeSystem: " + theSystem);
-}
-return currentCSOpt.orElse(null);
+IIdType id = myFhirContext.getVersion().newIdType("CodeSystem", LOINC_LOW);
+return findResourceByIdWithNoException(id, myCodeSystemType);
 }
 
 return fetchResource(myCodeSystemType, theSystem);
 }
 
-/**
-* Obtains the current version of a CodeSystem using the fact that the current
-* version is always pointed by the ForcedId for the no-versioned CS
-*/
-private Optional<IBaseResource> getCodeSystemCurrentVersion(UriType theUrl) {
-if (!theUrl.getValueAsString().contains(LOINC_LOW)) {
-return Optional.empty();
-}
-
-return myTermReadSvc.readCodeSystemByForcedId(LOINC_LOW);
-}
-
 @Override
 public IBaseResource fetchValueSet(String theSystem) {
 if (TermReadSvcUtil.isLoincUnversionedValueSet(theSystem)) {
-Optional<IBaseResource> currentVSOpt = getValueSetCurrentVersion(new UriType(theSystem));
-return currentVSOpt.orElse(null);
+Optional<String> vsIdOpt = TermReadSvcUtil.getValueSetId(theSystem);
+if (vsIdOpt.isEmpty()) {
+return null;
+}
+IIdType id = myFhirContext.getVersion().newIdType("ValueSet", vsIdOpt.get());
+return findResourceByIdWithNoException(id, myValueSetType);
 }
 
 return fetchResource(myValueSetType, theSystem);
 }
 
 /**
-* Obtains the current version of a ValueSet using the fact that the current
-* version is always pointed by the ForcedId for the no-versioned VS
+* Performs a lookup by ID, with no exception thrown (since that can mark the active
+* transaction as rollback).
 */
-private Optional<IBaseResource> getValueSetCurrentVersion(UriType theUrl) {
-Optional<String> vsIdOpt = TermReadSvcUtil.getValueSetId(theUrl.getValueAsString());
-if (!vsIdOpt.isPresent()) {
-return Optional.empty();
+@Nullable
+private IBaseResource findResourceByIdWithNoException(IIdType id, Class<? extends IBaseResource> type) {
+SearchParameterMap map = SearchParameterMap.newSynchronous()
+.setLoadSynchronousUpTo(1)
+.add(IAnyResource.SP_RES_ID, new TokenParam(id.getValue()));
+IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(type);
+IBundleProvider outcome = dao.search(map, new SystemRequestDetails());
+List<IBaseResource> resources = outcome.getResources(0, 1);
+if (resources.isEmpty()) {
+return null;
+} else {
+return resources.get(0);
+}
 
-IFhirResourceDao<? extends IBaseResource> valueSetResourceDao = myDaoRegistry.getResourceDao(myValueSetType);
-IBaseResource valueSet = valueSetResourceDao.read(new IdDt("ValueSet", vsIdOpt.get()));
-return Optional.ofNullable(valueSet);
 }
 
 @Override
@@ -188,17 +160,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 return null;
 }
 
-String key = theClass + " " + theUri;
-IBaseResource fetched = myLoadCache.get(key, t -> doFetchResource(theClass, theUri));
-
-if (fetched == myNoMatch) {
-ourLog.debug(
-"Invalidating cache entry for URI: {} since the result of the underlying query is empty", theUri);
-myLoadCache.invalidate(key);
-return null;
-}
-
-return (T) fetched;
+return (T) doFetchResource(theClass, theUri);
 }
 
 private <T extends IBaseResource> IBaseResource doFetchResource(@Nullable Class<T> theClass, String theUri) {
@@ -209,17 +171,14 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 () -> doFetchResource(StructureDefinition.class, theUri)
 };
 return Arrays.stream(fetchers)
-.map(t -> t.get())
-.filter(t -> t != myNoMatch)
+.map(Supplier::get)
+.filter(Objects::nonNull)
 .findFirst()
-.orElse(myNoMatch);
+.orElse(null);
 }
 
 IdType id = new IdType(theUri);
-boolean localReference = false;
-if (id.hasBaseUrl() == false && id.hasIdPart() == true) {
-localReference = true;
-}
+boolean localReference = id.hasBaseUrl() == false && id.hasIdPart() == true;
 
 String resourceName = myFhirContext.getResourceType(theClass);
 IBundleProvider search;
@@ -230,7 +189,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 params.setLoadSynchronousUpTo(1);
 params.add(IAnyResource.SP_RES_ID, new StringParam(theUri));
 search = myDaoRegistry.getResourceDao(resourceName).search(params);
-if (search.size() == 0) {
+if (search.isEmpty()) {
 params = new SearchParameterMap();
 params.setLoadSynchronousUpTo(1);
 params.add(ValueSet.SP_URL, new UriParam(theUri));

@@ -246,7 +205,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 } else {
 params.add(ValueSet.SP_URL, new UriParam(theUri));
 }
-params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
 search = myDaoRegistry.getResourceDao(resourceName).search(params);
 
 if (search.isEmpty()

@@ -255,11 +214,13 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 params.setLoadSynchronousUpTo(1);
 if (versionSeparator != -1) {
 params.add(ValueSet.SP_VERSION, new TokenParam(theUri.substring(versionSeparator + 1)));
-params.add("system", new UriParam(theUri.substring(0, versionSeparator)));
+params.add(
+ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM,
+new UriParam(theUri.substring(0, versionSeparator)));
 } else {
-params.add("system", new UriParam(theUri));
+params.add(ca.uhn.fhir.model.dstu2.resource.ValueSet.SP_SYSTEM, new UriParam(theUri));
 }
-params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
 search = myDaoRegistry.getResourceDao(resourceName).search(params);
 }
 }

@@ -269,7 +230,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 if (theUri.startsWith("http://hl7.org/fhir/StructureDefinition/")) {
 String typeName = theUri.substring("http://hl7.org/fhir/StructureDefinition/".length());
 if (myFhirContext.getElementDefinition(typeName) != null) {
-return myNoMatch;
+return null;
 }
 }
 SearchParameterMap params = new SearchParameterMap();

@@ -299,7 +260,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 } else {
 params.add(CodeSystem.SP_URL, new UriParam(theUri));
 }
-params.setSort(new SortSpec("_lastUpdated").setOrder(SortOrderEnum.DESC));
+params.setSort(new SortSpec(SP_RES_LAST_UPDATED).setOrder(SortOrderEnum.DESC));
 search = myDaoRegistry.getResourceDao(resourceName).search(params);
 break;
 }
@@ -322,7 +283,7 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 
 Integer size = search.size();
 if (size == null || size == 0) {
-return myNoMatch;
+return null;
 }
 
 if (size > 1) {

@@ -349,8 +310,4 @@ public class JpaPersistedResourceValidationSupport implements IValidationSupport
 myCodeSystemType = myFhirContext.getResourceDefinition("ValueSet").getImplementingClass();
 }
 }
-
-public void clearCaches() {
-myLoadCache.invalidateAll();
-}
 }
@@ -61,7 +61,6 @@ import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.IAnyResource;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 import org.hl7.fhir.instance.model.api.IIdType;
-import org.hl7.fhir.r4.model.IdType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;

@@ -70,13 +69,12 @@ import org.springframework.transaction.support.TransactionSynchronizationManager
 
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -87,7 +85,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
 
 /**
 * This class is used to convert between PIDs (the internal primary key for a particular resource as
-* stored in the {@link ca.uhn.fhir.jpa.model.entity.ResourceTable HFJ_RESOURCE} table), and the
+* stored in the {@link ResourceTable HFJ_RESOURCE} table), and the
 * public ID that a resource has.
 * <p>
 * These IDs are sometimes one and the same (by default, a resource that the server assigns the ID of
@@ -150,20 +148,44 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 throws ResourceNotFoundException {
 
 IIdType id;
+boolean untyped;
 if (theResourceType != null) {
+untyped = false;
 id = newIdType(theResourceType + "/" + theResourceId);
 } else {
+/*
+* This shouldn't be common, but we need to be able to handle it.
+* The only real known use case currently is when handing references
+* in searches where the client didn't qualify the ID. E.g.
+* /Provenance?target=A,B,C
+* We emit a warning in this case that they should be qualfying the
+* IDs, but we do stil allow it.
+*/
+untyped = true;
 id = newIdType(theResourceId);
 }
 List<IIdType> ids = List.of(id);
 Map<IIdType, IResourceLookup<JpaPid>> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode);
 
 // We only pass 1 input in so only 0..1 will come back
-if (!outcome.containsKey(id)) {
+Validate.isTrue(outcome.size() <= 1, "Unexpected output size %s for ID: %s", outcome.size(), ids);
+
+IResourceLookup<JpaPid> retVal;
+if (untyped) {
+if (outcome.isEmpty()) {
+retVal = null;
+} else {
+retVal = outcome.values().iterator().next();
+}
+} else {
+retVal = outcome.get(id);
+}
+
+if (retVal == null) {
 throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known");
 }
 
-return outcome.get(id);
+return retVal;
 }
 
 @Nonnull
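The comment added in the hunk above refers to searches that pass unqualified reference IDs, such as /Provenance?target=A,B,C. The sketch below shows what that request shape looks like from a HAPI FHIR generic client; it is a hedged illustration of the scenario (standard client API against a hypothetical server URL), not code from this commit.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Provenance;

public class UnqualifiedReferenceSearchExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Equivalent to GET [base]/Provenance?target=A,B,C -- the target IDs carry no
		// resource type, which is the "untyped" case the server code above tolerates
		// (with a warning) rather than rejecting.
		Bundle results = client.search()
				.forResource(Provenance.class)
				.where(Provenance.TARGET.hasAnyOfIds("A", "B", "C"))
				.returnBundle(Bundle.class)
				.execute();

		System.out.println(results.getEntry().size());
	}
}
```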
@@ -180,7 +202,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 }
 
 Collection<IIdType> ids = new ArrayList<>(theIds);
-ids.forEach(id -> Validate.isTrue(id.hasIdPart()));
+for (IIdType id : theIds) {
+if (!id.hasIdPart()) {
+throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request");
+}
+}
 
 RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);
 ListMultimap<IIdType, IResourceLookup<JpaPid>> idToLookup =
@@ -197,16 +223,16 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 }
 
 // Convert the multimap into a simple map
-Map<IIdType, IResourceLookup<JpaPid>> retVal = new HashMap<>();
+Map<IIdType, IResourceLookup<JpaPid>> retVal = new HashMap<>(idToLookup.size());
 for (Map.Entry<IIdType, IResourceLookup<JpaPid>> next : idToLookup.entries()) {
-if (next.getValue().getDeleted() != null) {
+IResourceLookup<JpaPid> nextLookup = next.getValue();
+
+IIdType resourceId = myFhirCtx.getVersion().newIdType(nextLookup.getResourceType(), nextLookup.getFhirId());
+if (nextLookup.getDeleted() != null) {
 if (theMode.isFailOnDeleted()) {
 String msg = myFhirCtx
 .getLocalizer()
-.getMessageSanitized(
-IdHelperService.class,
-"deletedId",
-next.getKey().getValue());
+.getMessageSanitized(IdHelperService.class, "deletedId", resourceId.getValue());
 throw new ResourceGoneException(Msg.code(2572) + msg);
 }
 if (!theMode.isIncludeDeleted()) {
@@ -214,7 +240,9 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 }
 }
 
-IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue());
+nextLookup.getPersistentId().setAssociatedResourceId(resourceId);
+
+IResourceLookup<JpaPid> previousValue = retVal.put(resourceId, nextLookup);
 if (previousValue != null) {
 /*
 * This means that either:

@@ -224,11 +252,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 * with the same ID.
 * 2. The unique constraint on the FHIR_ID column has been dropped
 */
-ourLog.warn(
-"Resource ID[{}] corresponds to lookups: {} and {}",
-next.getKey(),
-previousValue,
-next.getValue());
+ourLog.warn("Resource ID[{}] corresponds to lookups: {} and {}", resourceId, previousValue, nextLookup);
 String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
 throw new PreconditionFailedException(Msg.code(1099) + msg);
 }
@@ -320,26 +344,15 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 
 // one create one clause per id.
 List<Predicate> innerIdPredicates = new ArrayList<>(theIdsToResolve.size());
-boolean haveUntypedIds = false;
 for (IIdType next : theIdsToResolve) {
-if (!next.hasResourceType()) {
-haveUntypedIds = true;
-}
-
 List<Predicate> idPredicates = new ArrayList<>(2);
 
-if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
-&& next.isIdPartValidLong()) {
-Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong());
+if (isNotBlank(next.getResourceType())) {
+Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
 idPredicates.add(typeCriteria);
-} else {
-if (isNotBlank(next.getResourceType())) {
-Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
-idPredicates.add(typeCriteria);
-}
-Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
-idPredicates.add(idCriteria);
 }
+Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
+idPredicates.add(idCriteria);
 
 innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
 }
@@ -357,18 +370,13 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 Integer partitionId = nextId.get(4, Integer.class);
 if (resourcePid != null) {
 JpaResourceLookup lookup = new JpaResourceLookup(
-resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));
+resourceType, fhirId, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));
 
 MemoryCacheService.ForcedIdCacheKey nextKey =
 new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId);
 IIdType id = nextKey.toIdType(myFhirCtx);
 theMapToPopulate.put(id, lookup);
 
-if (haveUntypedIds) {
-id = nextKey.toIdTypeWithoutResourceType(myFhirCtx);
-theMapToPopulate.put(id, lookup);
-}
-
+List<IResourceLookup<JpaPid>> valueToCache = theMapToPopulate.get(id);
 myMemoryCacheService.putAfterCommit(
 MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, valueToCache);
@@ -376,78 +384,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 }
 }
 
-/**
-* Returns a mapping of Id -> IResourcePersistentId.
-* If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned)
-* Optionally filters out deleted resources.
-*/
-@Override
-@Nonnull
-public Map<String, JpaPid> resolveResourcePersistentIds(
-@Nonnull RequestPartitionId theRequestPartitionId,
-String theResourceType,
-List<String> theIds,
-ResolveIdentityMode theMode) {
-assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
-Validate.notNull(theIds, "theIds cannot be null");
-Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty");
-
-Map<String, JpaPid> retVals = new HashMap<>();
-for (String id : theIds) {
-JpaPid retVal;
-if (!idRequiresForcedId(id)) {
-// is already a PID
-retVal = JpaPid.fromId(Long.parseLong(id));
-retVals.put(id, retVal);
-} else {
-// is a forced id
-// we must resolve!
-if (myStorageSettings.isDeleteEnabled()) {
-retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode)
-.getPersistentId();
-retVals.put(id, retVal);
-} else {
-// fetch from cache... adding to cache if not available
-String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, id);
-retVal = myMemoryCacheService.getThenPutAfterCommit(
-MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> {
-List<IIdType> ids = Collections.singletonList(new IdType(theResourceType, id));
-// fetches from cache using a function that checks cache first...
-List<JpaPid> resolvedIds =
-resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids);
-if (resolvedIds.isEmpty()) {
-throw new ResourceNotFoundException(Msg.code(1100) + ids.get(0));
-}
-return resolvedIds.get(0);
-});
-retVals.put(id, retVal);
-}
-}
-}
-
-return retVals;
-}
-
-/**
-* Given a resource type and ID, determines the internal persistent ID for the resource.
-* Optionally filters out deleted resources.
-*
-* @throws ResourceNotFoundException If the ID can not be found
-*/
-@Nonnull
-@Override
-public JpaPid resolveResourcePersistentIds(
-@Nonnull RequestPartitionId theRequestPartitionId,
-String theResourceType,
-String theId,
-ResolveIdentityMode theMode) {
-Validate.notNull(theId, "theId must not be null");
-
-Map<String, JpaPid> retVal = resolveResourcePersistentIds(
-theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode);
-return retVal.get(theId); // should be only one
-}
-
 /**
 * Returns true if the given resource ID should be stored in a forced ID. Under default config
 * (meaning client ID strategy is {@link JpaStorageSettings.ClientIdStrategyEnum#ALPHANUMERIC})
@@ -461,132 +397,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 || !isValidPid(theId);
 }
 
-@Nonnull
-private String toForcedIdToPidKey(
-@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
-return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
-}
-
-/**
-* Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
-* <p>
-* This implementation will always try to use a cache for performance, meaning that it can resolve resources that
-* are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
-*/
-@Override
-@Nonnull
-public List<JpaPid> resolveResourcePersistentIdsWithCache(
-RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
-boolean onlyForcedIds = false;
-return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds, onlyForcedIds);
-}
-
-/**
-* Given a collection of resource IDs (resource type + id), resolves the internal persistent IDs.
-* <p>
-* This implementation will always try to use a cache for performance, meaning that it can resolve resources that
-* are deleted (but note that forced IDs can't change, so the cache can't return incorrect results)
-*
-* @param theOnlyForcedIds If <code>true</code>, resources which are not existing forced IDs will not be resolved
-*/
-@Override
-@Nonnull
-public List<JpaPid> resolveResourcePersistentIdsWithCache(
-@Nonnull RequestPartitionId theRequestPartitionId, List<IIdType> theIds, boolean theOnlyForcedIds) {
-assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
-
-List<JpaPid> retVal = new ArrayList<>(theIds.size());
-
-for (IIdType id : theIds) {
-if (!id.hasIdPart()) {
-throw new InvalidRequestException(Msg.code(1101) + "Parameter value missing in request");
-}
-}
-
-if (!theIds.isEmpty()) {
-Set<IIdType> idsToCheck = new HashSet<>(theIds.size());
-for (IIdType nextId : theIds) {
-if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
-if (nextId.isIdPartValidLong()) {
-if (!theOnlyForcedIds) {
-JpaPid jpaPid = JpaPid.fromId(nextId.getIdPartAsLong());
-jpaPid.setAssociatedResourceId(nextId);
-retVal.add(jpaPid);
-}
-continue;
-}
-}
-
-String key = toForcedIdToPidKey(theRequestPartitionId, nextId.getResourceType(), nextId.getIdPart());
-JpaPid cachedId = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key);
-if (cachedId != null) {
-retVal.add(cachedId);
-continue;
-}
-
-idsToCheck.add(nextId);
-}
-new QueryChunker<IIdType>();
-TaskChunker.chunk(
-idsToCheck,
-SearchBuilder.getMaximumPageSize() / 2,
-ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
-}
-
-return retVal;
-}
-
-private void doResolvePersistentIds(
-RequestPartitionId theRequestPartitionId, List<IIdType> theIds, List<JpaPid> theOutputListToPopulate) {
-CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
-CriteriaQuery<Tuple> criteriaQuery = cb.createTupleQuery();
-Root<ResourceTable> from = criteriaQuery.from(ResourceTable.class);
-
-/*
-* IDX_RES_FHIR_ID covers these columns, but RES_ID is only INCLUDEd.
-* Only PG, and MSSql support INCLUDE COLUMNS.
-* @see AddIndexTask.generateSql
-*/
-criteriaQuery.multiselect(from.get("myId"), from.get("myResourceType"), from.get("myFhirId"));
-
-// one create one clause per id.
-List<Predicate> predicates = new ArrayList<>(theIds.size());
-for (IIdType next : theIds) {
-
-List<Predicate> andPredicates = new ArrayList<>(3);
-
-if (isNotBlank(next.getResourceType())) {
-Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
-andPredicates.add(typeCriteria);
-}
-
-Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
-andPredicates.add(idCriteria);
-getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(andPredicates::add);
-predicates.add(cb.and(andPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
-}
-
-// join all the clauses as OR
-criteriaQuery.where(cb.or(predicates.toArray(EMPTY_PREDICATE_ARRAY)));
-
-TypedQuery<Tuple> query = myEntityManager.createQuery(criteriaQuery);
-List<Tuple> results = query.getResultList();
-for (Tuple nextId : results) {
-// Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
-Long resourceId = nextId.get(0, Long.class);
-String resourceType = nextId.get(1, String.class);
-String forcedId = nextId.get(2, String.class);
-if (resourceId != null) {
-JpaPid jpaPid = JpaPid.fromId(resourceId);
-populateAssociatedResourceId(resourceType, forcedId, jpaPid);
-theOutputListToPopulate.add(jpaPid);
-
-String key = toForcedIdToPidKey(theRequestPartitionId, resourceType, forcedId);
-myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, jpaPid);
-}
-}
-}
-
 /**
 * Return optional predicate for searching on forcedId
 * 1. If the partition mode is ALLOWED_UNQUALIFIED, the return optional predicate will be empty, so search is across all partitions.
@@ -609,7 +419,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 return Optional.of(partitionIdNullCriteria);
 } else {
 Predicate partitionIdCriteria = from.get("myPartitionIdValue")
-.in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList()));
+.in(partitionIds.stream().filter(Objects::nonNull).collect(Collectors.toList()));
 return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria));
 }
 } else {
@@ -719,10 +529,14 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 }
 
 /**
-* Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
+* This method can be called to pre-emptively add entries to the ID cache. It should
+* be called by DAO methods if they are creating or changing the deleted status
+* of a resource. This method returns immediately, but the data is not
+* added to the internal caches until the current DB transaction is successfully
+* committed, and nothing is added if the transaction rolls back.
 */
 @Override
-public void addResolvedPidToFhirId(
+public void addResolvedPidToFhirIdAfterCommit(
 @Nonnull JpaPid theJpaPid,
 @Nonnull RequestPartitionId theRequestPartitionId,
 @Nonnull String theResourceType,
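The rewritten Javadoc above spells out the after-commit contract of the ID cache: entries queued by a DAO only become visible if the surrounding database transaction commits, and are discarded on rollback. The sketch below illustrates that general pattern using Spring's transaction synchronization; the class name and cache shape are hypothetical, and this is not the MemoryCacheService.putAfterCommit implementation used by HAPI FHIR.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;

public class AfterCommitCacheWriter {

	private final Map<String, Object> myCache = new ConcurrentHashMap<>();

	/**
	 * Queues a cache write that is applied only if the active transaction commits.
	 * If no transaction is active, the value is cached immediately.
	 */
	public void putAfterCommit(String theKey, Object theValue) {
		if (TransactionSynchronizationManager.isSynchronizationActive()) {
			TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
				@Override
				public void afterCommit() {
					myCache.put(theKey, theValue);
				}
			});
		} else {
			myCache.put(theKey, theValue);
		}
	}
}
```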
@@ -736,11 +550,9 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
 theJpaPid.getId(),
 Optional.of(theResourceType + "/" + theFhirId));
-String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId);
-myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);
 
 JpaResourceLookup lookup = new JpaResourceLookup(
-theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
+theResourceType, theFhirId, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
 
 MemoryCacheService.ForcedIdCacheKey fhirIdKey =
 new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId);
@@ -763,13 +575,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 myPartitionSettings = thePartitionSettings;
 }
 
-@Override
-@Nonnull
-public List<JpaPid> getPidsOrThrowException(
-@Nonnull RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
-return resolveResourcePersistentIdsWithCache(theRequestPartitionId, theIds);
-}
-
 @Override
 @Nullable
 public JpaPid getPidOrNull(@Nonnull RequestPartitionId theRequestPartitionId, IBaseResource theResource) {
@@ -792,17 +597,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
 return retVal;
 }
 
-@Override
-@Nonnull
-public JpaPid getPidOrThrowException(@Nonnull RequestPartitionId theRequestPartitionId, IIdType theId) {
-List<IIdType> ids = Collections.singletonList(theId);
-List<JpaPid> resourcePersistentIds = resolveResourcePersistentIdsWithCache(theRequestPartitionId, ids);
-if (resourcePersistentIds.isEmpty()) {
-throw new InvalidRequestException(Msg.code(2295) + "Invalid ID was provided: [" + theId.getIdPart() + "]");
-}
-return resourcePersistentIds.get(0);
-}
-
 @Override
 @Nonnull
 public JpaPid getPidOrThrowException(@Nonnull IAnyResource theResource) {
@ -861,15 +655,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
|
|||
return retVal;
|
||||
}
|
||||
|
||||
public static boolean isValidPid(IIdType theId) {
|
||||
if (theId == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String idPart = theId.getIdPart();
|
||||
return isValidPid(idPart);
|
||||
}
|
||||
|
||||
public static boolean isValidPid(String theIdPart) {
|
||||
return StringUtils.isNumeric(theIdPart);
|
||||
}
|
||||
|
|
|
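For illustration only, a minimal sketch (not part of this commit) of how a DAO-layer caller might pre-populate the ID caches after persisting a resource. The service wiring, the entity accessors, and the trailing parameters (FHIR ID and deleted-at timestamp) are assumptions inferred from the usages shown above, not confirmed signatures.

// Hypothetical caller; myIdHelperService is an injected IIdHelperService<JpaPid>.
// The mapping only becomes visible in the caches once the surrounding DB
// transaction commits; nothing is cached if it rolls back.
JpaPid newPid = JpaPid.fromId(theEntity.getId());
newPid.setPartitionablePartitionId(theEntity.getPartitionId());
myIdHelperService.addResolvedPidToFhirIdAfterCommit(
        newPid,
        theRequestPartitionId,
        "Patient",               // resource type
        theEntity.getFhirId(),   // client-visible resource id part
        null);                   // deleted-at: null because the resource is live
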
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.mdm;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
import ca.uhn.fhir.jpa.entity.HapiFhirEnversRevision;
import ca.uhn.fhir.jpa.entity.MdmLink;

@@ -455,9 +456,13 @@ public class MdmLinkDaoJpaImpl implements IMdmLinkDao<JpaPid, MdmLink> {
@Nonnull
private List<Long> convertToLongIds(List<IIdType> theMdmHistorySearchParameters) {
return myIdHelperService
.getPidsOrThrowException(RequestPartitionId.allPartitions(), theMdmHistorySearchParameters)
.resolveResourceIdentities(
RequestPartitionId.allPartitions(),
theMdmHistorySearchParameters,
ResolveIdentityMode.includeDeleted().cacheOk())
.values()
.stream()
.map(JpaPid::getId)
.map(t -> t.getPersistentId().getId())
.collect(Collectors.toUnmodifiableList());
}

@@ -29,16 +29,34 @@ import java.util.Date;
public class JpaResourceLookup implements IResourceLookup<JpaPid> {

private final String myResourceType;
private final Long myResourcePid;
private final JpaPid myResourcePid;
private final Date myDeletedAt;
private final PartitionablePartitionId myPartitionablePartitionId;
private final String myFhirId;

public JpaResourceLookup(
String theResourceType,
String theFhirId,
Long theResourcePid,
Date theDeletedAt,
PartitionablePartitionId thePartitionablePartitionId) {
myResourceType = theResourceType;
myFhirId = theFhirId;
myDeletedAt = theDeletedAt;
myPartitionablePartitionId = thePartitionablePartitionId;

myResourcePid = JpaPid.fromId(theResourcePid);
myResourcePid.setPartitionablePartitionId(myPartitionablePartitionId);
}

public JpaResourceLookup(
String theResourceType,
String theFhirId,
JpaPid theResourcePid,
Date theDeletedAt,
PartitionablePartitionId thePartitionablePartitionId) {
myResourceType = theResourceType;
myFhirId = theFhirId;
myResourcePid = theResourcePid;
myDeletedAt = theDeletedAt;
myPartitionablePartitionId = thePartitionablePartitionId;

@@ -49,6 +67,11 @@ public class JpaResourceLookup implements IResourceLookup<JpaPid> {
return myResourceType;
}

@Override
public String getFhirId() {
return myFhirId;
}

@Override
public Date getDeleted() {
return myDeletedAt;

@@ -56,10 +79,7 @@ public class JpaResourceLookup implements IResourceLookup<JpaPid> {

@Override
public JpaPid getPersistentId() {
JpaPid jpaPid = JpaPid.fromId(myResourcePid);
jpaPid.setPartitionablePartitionId(myPartitionablePartitionId);

return jpaPid;
return myResourcePid;
}

@Override

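A minimal illustrative sketch (not part of this commit) of building a lookup with the new FHIR-ID-aware constructor and reading both identifiers back; the literal values here are placeholders.

// The Long overload wraps the PID via JpaPid.fromId() and applies the partition.
PartitionablePartitionId partitionId = new PartitionablePartitionId();
JpaResourceLookup lookup = new JpaResourceLookup(
        "Patient",   // resource type
        "ABC",       // FHIR ID (client-assigned id part)
        123L,        // persistent PID
        null,        // deletedAt: null for a non-deleted resource
        partitionId);
JpaPid pid = lookup.getPersistentId();   // JpaPid carrying the partition
String fhirId = lookup.getFhirId();      // "ABC"
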
@@ -559,7 +559,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
}

@Override
@Transactional
@Transactional(readOnly = true)
public IBaseResource loadPackageAssetByUrl(FhirVersionEnum theFhirVersion, String theCanonicalUrl) {

String canonicalUrl = theCanonicalUrl;

@@ -35,6 +35,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.dao.JpaPid;

@@ -121,7 +122,7 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
private ISearchParamRegistry mySearchParamRegistry;

@Autowired
private IIdHelperService myIdHelperService;
private IIdHelperService<JpaPid> myIdHelperService;

@Autowired
private DaoRegistry myDaoRegistry;

@@ -282,9 +283,11 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
inverse = true;
}

List<JpaPid> targetPids =
myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, targetIds);
List<Long> targetPidList = JpaPid.toLongList(targetPids);
List<JpaPid> pids = myIdHelperService.resolveResourcePids(
theRequestPartitionId,
targetIds,
ResolveIdentityMode.includeDeleted().cacheOk());
List<Long> targetPidList = pids.stream().map(JpaPid::getId).collect(Collectors.toList());

if (targetPidList.isEmpty() && targetQualifiedUrls.isEmpty()) {
setMatchNothing();

@@ -34,9 +34,9 @@ import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.term.api.ITermConceptClientMappingSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;

@@ -73,18 +73,12 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin

private final int myFetchSize = TermReadSvcImpl.DEFAULT_FETCH_SIZE;

protected static boolean ourLastResultsFromTranslationCache; // For testing.
protected static boolean ourLastResultsFromTranslationWithReverseCache; // For testing.

@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;

@Autowired
protected FhirContext myContext;

@Autowired
protected MemoryCacheService myMemoryCacheService;

@Autowired
protected IIdHelperService<JpaPid> myIdHelperService;

@@ -107,7 +101,6 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");

List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
List<TranslateConceptResult> cachedTargets;
ArrayList<Predicate> predicates;
Coding coding;

@@ -117,110 +110,99 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);

for (TranslationQuery translationQuery : translationQueries) {
cachedTargets = myMemoryCacheService.getIfPresent(
MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery);
if (cachedTargets == null) {
final List<TranslateConceptResult> targets = new ArrayList<>();
final List<TranslateConceptResult> targets = new ArrayList<>();

predicates = new ArrayList<>();
predicates = new ArrayList<>();

coding = translationQuery.getCoding();
if (coding.hasCode()) {
predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode()));
} else {
throw new InvalidRequestException(
Msg.code(842) + "A code must be provided for translation to occur.");
}

if (coding.hasSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem()));
}

if (coding.hasVersion()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion()));
}

if (translationQuery.hasTargetSystem()) {
predicates.add(
criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem()));
}

if (translationQuery.hasUrl()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl()));
if (translationQuery.hasConceptMapVersion()) {
// both url and conceptMapVersion
predicates.add(criteriaBuilder.equal(
conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion()));
} else {
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
// only url and use latestConceptMapVersion
predicates.add(
criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
} else {
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
}
}
}

if (translationQuery.hasSource()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource()));
}

if (translationQuery.hasTarget()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget()));
}

if (translationQuery.hasResourceId()) {
IIdType resourceId = translationQuery.getResourceId();
JpaPid resourcePid =
myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId);
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId()));
}

Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
query.where(outerPredicate);

// Use scrollable results.
final TypedQuery<TermConceptMapGroupElementTarget> typedQuery =
myEntityManager.createQuery(query.select(root));
org.hibernate.query.Query<TermConceptMapGroupElementTarget> hibernateQuery =
(org.hibernate.query.Query<TermConceptMapGroupElementTarget>) typedQuery;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
try (ScrollableResultsIterator<TermConceptMapGroupElementTarget> scrollableResultsIterator =
new ScrollableResultsIterator<>(scrollableResults)) {

Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
while (scrollableResultsIterator.hasNext()) {
TermConceptMapGroupElementTarget next = scrollableResultsIterator.next();
if (matches.add(next)) {

TranslateConceptResult translationMatch = new TranslateConceptResult();
if (next.getEquivalence() != null) {
translationMatch.setEquivalence(
next.getEquivalence().toCode());
}

translationMatch.setCode(next.getCode());
translationMatch.setSystem(next.getSystem());
translationMatch.setSystemVersion(next.getSystemVersion());
translationMatch.setDisplay(next.getDisplay());
translationMatch.setValueSet(next.getValueSet());
translationMatch.setSystemVersion(next.getSystemVersion());
translationMatch.setConceptMapUrl(next.getConceptMapUrl());

targets.add(translationMatch);
}
}
}

ourLastResultsFromTranslationCache = false; // For testing.
myMemoryCacheService.put(MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION, translationQuery, targets);
retVal.getResults().addAll(targets);
coding = translationQuery.getCoding();
if (coding.hasCode()) {
predicates.add(criteriaBuilder.equal(elementJoin.get("myCode"), coding.getCode()));
} else {
ourLastResultsFromTranslationCache = true; // For testing.
retVal.getResults().addAll(cachedTargets);
throw new InvalidRequestException(Msg.code(842) + "A code must be provided for translation to occur.");
}

if (coding.hasSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), coding.getSystem()));
}

if (coding.hasVersion()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("mySourceVersion"), coding.getVersion()));
}

if (translationQuery.hasTargetSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), translationQuery.getTargetSystem()));
}

if (translationQuery.hasUrl()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl()));
if (translationQuery.hasConceptMapVersion()) {
// both url and conceptMapVersion
predicates.add(criteriaBuilder.equal(
conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion()));
} else {
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
// only url and use latestConceptMapVersion
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
} else {
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
}
}
}

if (translationQuery.hasSource()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getSource()));
}

if (translationQuery.hasTarget()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getTarget()));
}

if (translationQuery.hasResourceId()) {
IIdType resourceId = translationQuery.getResourceId();
JpaPid resourcePid =
myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId);
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId()));
}

Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
query.where(outerPredicate);

// Use scrollable results.
final TypedQuery<TermConceptMapGroupElementTarget> typedQuery =
myEntityManager.createQuery(query.select(root));
org.hibernate.query.Query<TermConceptMapGroupElementTarget> hibernateQuery =
(org.hibernate.query.Query<TermConceptMapGroupElementTarget>) typedQuery;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults<TermConceptMapGroupElementTarget> scrollableResults =
hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
try (ScrollableResultsIterator<TermConceptMapGroupElementTarget> scrollableResultsIterator =
new ScrollableResultsIterator<>(scrollableResults)) {

Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
while (scrollableResultsIterator.hasNext()) {
TermConceptMapGroupElementTarget next = scrollableResultsIterator.next();
if (matches.add(next)) {

TranslateConceptResult translationMatch = new TranslateConceptResult();
if (next.getEquivalence() != null) {
translationMatch.setEquivalence(
next.getEquivalence().toCode());
}

translationMatch.setCode(next.getCode());
translationMatch.setSystem(next.getSystem());
translationMatch.setSystemVersion(next.getSystemVersion());
translationMatch.setDisplay(next.getDisplay());
translationMatch.setValueSet(next.getValueSet());
translationMatch.setSystemVersion(next.getSystemVersion());
translationMatch.setConceptMapUrl(next.getConceptMapUrl());

targets.add(translationMatch);
}
}
}

retVal.getResults().addAll(targets);
}

buildTranslationResult(retVal);

@@ -242,7 +224,6 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin
Join<TermConceptMapGroup, TermConceptMap> conceptMapJoin = groupJoin.join("myConceptMap");

List<TranslationQuery> translationQueries = theTranslationRequest.getTranslationQueries();
List<TranslateConceptResult> cachedElements;
ArrayList<Predicate> predicates;
Coding coding;

@@ -252,145 +233,137 @@ public class TermConceptClientMappingSvcImpl implements ITermConceptClientMappin
latestConceptMapVersion = getLatestConceptMapVersion(theTranslationRequest);

for (TranslationQuery translationQuery : translationQueries) {
cachedElements = myMemoryCacheService.getIfPresent(
MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery);
if (cachedElements == null) {
final List<TranslateConceptResult> elements = new ArrayList<>();
final List<TranslateConceptResult> elements = new ArrayList<>();

predicates = new ArrayList<>();
predicates = new ArrayList<>();

coding = translationQuery.getCoding();
String targetCode;
String targetCodeSystem = null;
if (coding.hasCode()) {
predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode()));
targetCode = coding.getCode();
coding = translationQuery.getCoding();
String targetCode;
String targetCodeSystem = null;
if (coding.hasCode()) {
predicates.add(criteriaBuilder.equal(targetJoin.get("myCode"), coding.getCode()));
targetCode = coding.getCode();
} else {
throw new InvalidRequestException(Msg.code(843) + "A code must be provided for translation to occur.");
}

if (coding.hasSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem()));
targetCodeSystem = coding.getSystem();
}

if (coding.hasVersion()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion()));
}

if (translationQuery.hasUrl()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl()));
if (translationQuery.hasConceptMapVersion()) {
// both url and conceptMapVersion
predicates.add(criteriaBuilder.equal(
conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion()));
} else {
throw new InvalidRequestException(
Msg.code(843) + "A code must be provided for translation to occur.");
}

if (coding.hasSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("myTarget"), coding.getSystem()));
targetCodeSystem = coding.getSystem();
}

if (coding.hasVersion()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("myTargetVersion"), coding.getVersion()));
}

if (translationQuery.hasUrl()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myUrl"), translationQuery.getUrl()));
if (translationQuery.hasConceptMapVersion()) {
// both url and conceptMapVersion
predicates.add(criteriaBuilder.equal(
conceptMapJoin.get("myVersion"), translationQuery.getConceptMapVersion()));
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
// only url and use latestConceptMapVersion
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
} else {
if (StringUtils.isNotBlank(latestConceptMapVersion)) {
// only url and use latestConceptMapVersion
predicates.add(
criteriaBuilder.equal(conceptMapJoin.get("myVersion"), latestConceptMapVersion));
} else {
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
}
predicates.add(criteriaBuilder.isNull(conceptMapJoin.get("myVersion")));
}
}
}

if (translationQuery.hasTargetSystem()) {
predicates.add(
criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem()));
}
if (translationQuery.hasTargetSystem()) {
predicates.add(criteriaBuilder.equal(groupJoin.get("mySource"), translationQuery.getTargetSystem()));
}

if (translationQuery.hasSource()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource()));
}
if (translationQuery.hasSource()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myTarget"), translationQuery.getSource()));
}

if (translationQuery.hasTarget()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget()));
}
if (translationQuery.hasTarget()) {
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("mySource"), translationQuery.getTarget()));
}

if (translationQuery.hasResourceId()) {
IIdType resourceId = translationQuery.getResourceId();
JpaPid resourcePid =
myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId);
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId()));
}
if (translationQuery.hasResourceId()) {
IIdType resourceId = translationQuery.getResourceId();
JpaPid resourcePid =
myIdHelperService.getPidOrThrowException(RequestPartitionId.defaultPartition(), resourceId);
predicates.add(criteriaBuilder.equal(conceptMapJoin.get("myResourcePid"), resourcePid.getId()));
}

Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
query.where(outerPredicate);
Predicate outerPredicate = criteriaBuilder.and(predicates.toArray(new Predicate[0]));
query.where(outerPredicate);

// Use scrollable results.
final TypedQuery<TermConceptMapGroupElement> typedQuery =
myEntityManager.createQuery(query.select(root));
org.hibernate.query.Query<TermConceptMapGroupElement> hibernateQuery =
(org.hibernate.query.Query<TermConceptMapGroupElement>) typedQuery;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults scrollableResults = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
try (ScrollableResultsIterator<TermConceptMapGroupElement> scrollableResultsIterator =
new ScrollableResultsIterator<>(scrollableResults)) {
// Use scrollable results.
final TypedQuery<TermConceptMapGroupElement> typedQuery = myEntityManager.createQuery(query.select(root));
org.hibernate.query.Query<TermConceptMapGroupElement> hibernateQuery =
(org.hibernate.query.Query<TermConceptMapGroupElement>) typedQuery;
hibernateQuery.setFetchSize(myFetchSize);
ScrollableResults<TermConceptMapGroupElement> scrollableResults =
hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
try (ScrollableResultsIterator<TermConceptMapGroupElement> scrollableResultsIterator =
new ScrollableResultsIterator<>(scrollableResults)) {

Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
while (scrollableResultsIterator.hasNext()) {
TermConceptMapGroupElement nextElement = scrollableResultsIterator.next();
Set<TermConceptMapGroupElementTarget> matches = new HashSet<>();
while (scrollableResultsIterator.hasNext()) {
TermConceptMapGroupElement nextElement = scrollableResultsIterator.next();

/* TODO: The invocation of size() below does not seem to be necessary, but for
 * some reason removing it causes tests in TerminologySvcImplR4Test to fail. We use the outcome
 * in a trace log to avoid ErrorProne flagging an unused return value.
 */
int size =
nextElement.getConceptMapGroupElementTargets().size();
ourLog.trace("Have {} targets", size);
/* TODO: The invocation of size() below does not seem to be necessary, but for
 * some reason removing it causes tests in TerminologySvcImplR4Test to fail. We use the outcome
 * in a trace log to avoid ErrorProne flagging an unused return value.
 */
int size = nextElement.getConceptMapGroupElementTargets().size();
ourLog.trace("Have {} targets", size);

myEntityManager.detach(nextElement);
myEntityManager.detach(nextElement);

if (isNotBlank(targetCode)) {
for (TermConceptMapGroupElementTarget next :
nextElement.getConceptMapGroupElementTargets()) {
if (matches.add(next)) {
if (isBlank(targetCodeSystem)
|| StringUtils.equals(targetCodeSystem, next.getSystem())) {
if (StringUtils.equals(targetCode, next.getCode())) {
TranslateConceptResult translationMatch = new TranslateConceptResult();
translationMatch.setCode(nextElement.getCode());
translationMatch.setSystem(nextElement.getSystem());
translationMatch.setSystemVersion(nextElement.getSystemVersion());
translationMatch.setDisplay(nextElement.getDisplay());
translationMatch.setValueSet(nextElement.getValueSet());
translationMatch.setSystemVersion(nextElement.getSystemVersion());
translationMatch.setConceptMapUrl(nextElement.getConceptMapUrl());
if (next.getEquivalence() != null) {
translationMatch.setEquivalence(
next.getEquivalence().toCode());
}
if (isNotBlank(targetCode)) {
for (TermConceptMapGroupElementTarget next : nextElement.getConceptMapGroupElementTargets()) {
if (matches.add(next)) {
if (isBlank(targetCodeSystem)
|| StringUtils.equals(targetCodeSystem, next.getSystem())) {
if (StringUtils.equals(targetCode, next.getCode())) {
TranslateConceptResult translationMatch =
newTranslateConceptResult(nextElement, next);

if (alreadyContainsMapping(elements, translationMatch)
|| alreadyContainsMapping(retVal.getResults(), translationMatch)) {
continue;
}

elements.add(translationMatch);
if (alreadyContainsMapping(elements, translationMatch)
|| alreadyContainsMapping(retVal.getResults(), translationMatch)) {
continue;
}

elements.add(translationMatch);
}
}
}
}
}
}

ourLastResultsFromTranslationWithReverseCache = false; // For testing.
myMemoryCacheService.put(
MemoryCacheService.CacheEnum.CONCEPT_TRANSLATION_REVERSE, translationQuery, elements);
retVal.getResults().addAll(elements);
} else {
ourLastResultsFromTranslationWithReverseCache = true; // For testing.
retVal.getResults().addAll(cachedElements);
}

retVal.getResults().addAll(elements);
}

buildTranslationResult(retVal);
return retVal;
}

@Nonnull
private static TranslateConceptResult newTranslateConceptResult(
TermConceptMapGroupElement theGroup, TermConceptMapGroupElementTarget theTarget) {
TranslateConceptResult translationMatch = new TranslateConceptResult();
translationMatch.setCode(theGroup.getCode());
translationMatch.setSystem(theGroup.getSystem());
translationMatch.setSystemVersion(theGroup.getSystemVersion());
translationMatch.setDisplay(theGroup.getDisplay());
translationMatch.setValueSet(theGroup.getValueSet());
translationMatch.setSystemVersion(theGroup.getSystemVersion());
translationMatch.setConceptMapUrl(theGroup.getConceptMapUrl());
if (theTarget.getEquivalence() != null) {
translationMatch.setEquivalence(theTarget.getEquivalence().toCode());
}
return translationMatch;
}

@Override
public FhirContext getFhirContext() {
return myContext;

@@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ValidateUtil;
import com.google.common.annotations.VisibleForTesting;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;

@@ -315,38 +314,6 @@ public class TermConceptMappingSvcImpl extends TermConceptClientMappingSvcImpl i
}
}

/**
 * This method is present only for unit tests, do not call from client code
 */
@VisibleForTesting
public static void clearOurLastResultsFromTranslationCache() {
ourLastResultsFromTranslationCache = false;
}

/**
 * This method is present only for unit tests, do not call from client code
 */
@VisibleForTesting
public static void clearOurLastResultsFromTranslationWithReverseCache() {
ourLastResultsFromTranslationWithReverseCache = false;
}

/**
 * This method is present only for unit tests, do not call from client code
 */
@VisibleForTesting
static boolean isOurLastResultsFromTranslationCache() {
return ourLastResultsFromTranslationCache;
}

/**
 * This method is present only for unit tests, do not call from client code
 */
@VisibleForTesting
static boolean isOurLastResultsFromTranslationWithReverseCache() {
return ourLastResultsFromTranslationWithReverseCache;
}

public static Parameters toParameters(TranslateConceptResults theTranslationResult) {
Parameters retVal = new Parameters();

@@ -158,6 +158,7 @@ import static ca.uhn.fhir.jpa.term.loinc.LoincUploadPropertiesEnum.LOINC_UPLOAD_
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_ALL_VALUESET_ID;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_GENERIC_VALUESET_URL;

public class TermLoaderSvcImpl implements ITermLoaderSvc {
public static final String CUSTOM_CONCEPTS_FILE = "concepts.csv";

@@ -1026,7 +1027,7 @@ public class TermLoaderSvcImpl implements ITermLoaderSvc {
valueSetId = LOINC_ALL_VALUESET_ID;
}
retVal.setId(valueSetId);
retVal.setUrl("http://loinc.org/vs");
retVal.setUrl(LOINC_GENERIC_VALUESET_URL);
retVal.setVersion(codeSystemVersionId);
retVal.setName("All LOINC codes");
retVal.setStatus(Enumerations.PublicationStatus.ACTIVE);

@@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptViewDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptViewOracleDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.entity.ITermValueSetConceptView;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;

@@ -80,8 +81,6 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.sl.cache.Cache;
import ca.uhn.fhir.sl.cache.CacheFactory;
import ca.uhn.fhir.util.CoverageIgnore;
import ca.uhn.fhir.util.FhirVersionIndependentConcept;
import ca.uhn.fhir.util.HapiExtensions;

@@ -205,10 +204,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
private static final String OUR_PIPE_CHARACTER = "|";
private static final int SECONDS_IN_MINUTE = 60;
private static final int INDEXED_ROOTS_LOGGING_COUNT = 50_000;
private static final String CS_USERDATA_CURRENT_VERSION = TermReadSvcImpl.class.getName() + "_CS_CURRENT_VERSION";
private static final String VS_USERDATA_CURRENT_VERSION = TermReadSvcImpl.class.getName() + "_VS_CURRENT_VERSION";
private static Runnable myInvokeOnNextCallForUnitTest;
private static boolean ourForceDisableHibernateSearchForUnitTest;
private final Cache<String, TermCodeSystemVersionDetails> myCodeSystemCurrentVersionCache =
CacheFactory.build(TimeUnit.MINUTES.toMillis(1));

@Autowired
protected DaoRegistry myDaoRegistry;

@@ -300,7 +299,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
if (isBlank(theSystem)) {
return false;
}
TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theSystem);
TermCodeSystemVersionDetails cs = getCurrentCodeSystemVersion(theValidationSupportContext, theSystem);
return cs != null;
}

@@ -459,14 +458,6 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return retVal;
}

/**
 * This method is present only for unit tests, do not call from client code
 */
@VisibleForTesting
public void clearCaches() {
myCodeSystemCurrentVersionCache.invalidateAll();
}

public Optional<TermValueSet> deleteValueSetForResource(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
Optional<TermValueSet> optionalExistingTermValueSetById =

@@ -678,10 +669,10 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
private String toHumanReadableExpansionTimestamp(TermValueSet termValueSet) {
String expansionTimestamp = "(unknown)";
if (termValueSet.getExpansionTimestamp() != null) {
String timeElapsed = StopWatch.formatMillis(System.currentTimeMillis()
- termValueSet.getExpansionTimestamp().getTime());
expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString() + " ("
+ timeElapsed + " ago)";
// Note: We used to append "123ms ago" to the timestamp, but we cache the
// results here, so it's just kind of weird to do that since the duration will
// be out of date when the entry comes back from cache
expansionTimestamp = new InstantType(termValueSet.getExpansionTimestamp()).getValueAsString();
}
return expansionTimestamp;
}

@@ -2059,7 +2050,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}

@Override
@Transactional
@Transactional(readOnly = true)
public boolean isValueSetPreExpandedForCodeValidation(ValueSet theValueSet) {
Optional<TermValueSet> optionalTermValueSet = fetchValueSetEntity(theValueSet);

@@ -2084,9 +2075,18 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return true;
}

@SuppressWarnings({"OptionalAssignedToNull", "unchecked"})
private Optional<TermValueSet> fetchValueSetEntity(ValueSet theValueSet) {
JpaPid valueSetResourcePid = getValueSetResourcePersistentId(theValueSet);
return myTermValueSetDao.findByResourcePid(valueSetResourcePid.getId());
Optional<TermValueSet> retVal = (Optional<TermValueSet>) theValueSet.getUserData(VS_USERDATA_CURRENT_VERSION);
if (retVal == null) {
synchronized (theValueSet) {
JpaPid valueSetResourcePid = getValueSetResourcePersistentId(theValueSet);
retVal = myTermValueSetDao.findByResourcePid(valueSetResourcePid.getId());
theValueSet.setUserData(VS_USERDATA_CURRENT_VERSION, retVal);
}
}

return retVal;
}

private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) {

@@ -2098,6 +2098,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}

protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet(
ValidationSupportContext theValidationSupportContext,
ConceptValidationOptions theValidationOptions,
ValueSet theValueSet,
String theSystem,

@@ -2137,9 +2138,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return null;
}

TermValueSet valueSetEntity = myTermValueSetDao
.findByResourcePid(valueSetResourcePid.getId())
.orElseThrow(IllegalStateException::new);
TermValueSet valueSetEntity = fetchValueSetEntity(theValueSet).orElseThrow(IllegalStateException::new);
String timingDescription = toHumanReadableExpansionTimestamp(valueSetEntity);
String preExpansionMessage = myContext
.getLocalizer()

@@ -2255,26 +2254,24 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
public Optional<TermConcept> findCode(String theCodeSystem, String theCode) {
/*
 * Loading concepts without a transaction causes issues later on some
 * platforms (e.g. PSQL) so this transaction template is here to make
 * sure that we always call this with an open transaction
 * platforms (e.g. PSQL) so make sure that we always call this with an open transaction
 */
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_MANDATORY);
txTemplate.setReadOnly(true);
HapiTransactionService.requireTransaction();

return txTemplate.execute(t -> {
TermCodeSystemVersionDetails csv = getCurrentCodeSystemVersion(theCodeSystem);
if (csv == null) {
return Optional.empty();
}
return myConceptDao.findByCodeSystemAndCode(csv.myPid, theCode);
});
TermCodeSystemVersionDetails csv =
getCurrentCodeSystemVersion(new ValidationSupportContext(provideValidationSupport()), theCodeSystem);
if (csv == null) {
return Optional.empty();
}
return myConceptDao.findByCodeSystemAndCode(csv.myPid, theCode);
}

@Override
@Transactional(propagation = Propagation.MANDATORY)
public List<TermConcept> findCodes(String theCodeSystem, List<String> theCodeList) {
TermCodeSystemVersionDetails csv = getCurrentCodeSystemVersion(theCodeSystem);
HapiTransactionService.requireTransaction();

TermCodeSystemVersionDetails csv =
getCurrentCodeSystemVersion(new ValidationSupportContext(provideValidationSupport()), theCodeSystem);
if (csv == null) {
return Collections.emptyList();
}

@@ -2283,30 +2280,51 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}

@Nullable
private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(String theCodeSystemIdentifier) {
private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(
ValidationSupportContext theValidationSupportContext, String theCodeSystemIdentifier) {
String version = getVersionFromIdentifier(theCodeSystemIdentifier);
TermCodeSystemVersionDetails retVal = myCodeSystemCurrentVersionCache.get(
theCodeSystemIdentifier,
t -> myTxTemplate.execute(tx -> {
TermCodeSystemVersion csv = null;
TermCodeSystem cs =
myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier));
if (cs != null) {
if (version != null) {
csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version);
} else if (cs.getCurrentVersion() != null) {
csv = cs.getCurrentVersion();
}
}
if (csv != null) {
return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId());
} else {
return NO_CURRENT_VERSION;
}
}));
if (retVal == NO_CURRENT_VERSION) {
return null;

// Fetch the CodeSystem from ValidationSupport, which should return a cached copy. We
// keep a copy of the current version entity in userData in that cached copy
// to avoid repeated lookups
TermCodeSystemVersionDetails retVal;
IBaseResource codeSystem =
theValidationSupportContext.getRootValidationSupport().fetchCodeSystem(theCodeSystemIdentifier);
if (codeSystem != null) {

synchronized (codeSystem) {
retVal = (TermCodeSystemVersionDetails) codeSystem.getUserData(CS_USERDATA_CURRENT_VERSION);
if (retVal == null) {
retVal = getCurrentCodeSystemVersion(theCodeSystemIdentifier, version);
codeSystem.setUserData(CS_USERDATA_CURRENT_VERSION, retVal);
}
}
} else {
retVal = getCurrentCodeSystemVersion(theCodeSystemIdentifier, version);
}

return retVal;
}

@Nullable
private TermCodeSystemVersionDetails getCurrentCodeSystemVersion(String theCodeSystemIdentifier, String version) {
TermCodeSystemVersionDetails retVal;
retVal = myTxTemplate.execute(tx -> {
TermCodeSystemVersion csv = null;
TermCodeSystem cs = myCodeSystemDao.findByCodeSystemUri(getUrlFromIdentifier(theCodeSystemIdentifier));
if (cs != null) {
if (version != null) {
csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(cs.getPid(), version);
} else if (cs.getCurrentVersion() != null) {
csv = cs.getCurrentVersion();
}
}
if (csv != null) {
return new TermCodeSystemVersionDetails(csv.getPid(), csv.getCodeSystemVersionId());
} else {
return null;
}
});
return retVal;
}

@@ -2332,7 +2350,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return retVal;
}

@Transactional(propagation = Propagation.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED, readOnly = true)
@Override
public Set<TermConcept> findCodesAbove(
Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {

@@ -2352,7 +2370,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return retVal;
}

@Transactional
@Transactional(readOnly = true)
@Override
public List<FhirVersionIndependentConcept> findCodesAbove(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem);

@@ -2365,7 +2383,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return toVersionIndependentConcepts(theSystem, codes);
}

@Transactional(propagation = Propagation.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED, readOnly = true)
@Override
public Set<TermConcept> findCodesBelow(
Long theCodeSystemResourcePid, Long theCodeSystemVersionPid, String theCode) {

@@ -2389,7 +2407,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
return retVal;
}

@Transactional
@Transactional(readOnly = true)
@Override
public List<FhirVersionIndependentConcept> findCodesBelow(String theSystem, String theCode) {
TermCodeSystem cs = getCodeSystem(theSystem);

@@ -2508,6 +2526,29 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
provideValidationSupport().invalidateCaches();
}

@SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter")
@Override
public void invalidateCaches() {
/*
 * Clear out anything left in the userdata caches. We do this mostly because it messes
 * up unit tests to have these things stick around between test runs, since many of
 * these resources come from DefaultProfileValidationSupport and therefore live beyond
 * any single test execution.
 */
for (IBaseResource next : provideValidationSupport().fetchAllConformanceResources()) {
if (next != null) {
synchronized (next) {
if (next.getUserData(CS_USERDATA_CURRENT_VERSION) != null) {
next.setUserData(CS_USERDATA_CURRENT_VERSION, null);
}
if (next.getUserData(VS_USERDATA_CURRENT_VERSION) != null) {
next.setUserData(VS_USERDATA_CURRENT_VERSION, null);
}
}
}
}
}

private synchronized boolean isPreExpandingValueSets() {
return myPreExpandingValueSets;
}

@@ -2823,7 +2864,9 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
txTemplate.setReadOnly(true);
Optional<FhirVersionIndependentConcept> codeOpt =
txTemplate.execute(tx -> findCode(theCodeSystemUrl, theCode).map(c -> {
String codeSystemVersionId = getCurrentCodeSystemVersion(theCodeSystemUrl).myCodeSystemVersionId;
String codeSystemVersionId = getCurrentCodeSystemVersion(
theValidationSupportContext, theCodeSystemUrl)
.myCodeSystemVersionId;
return new FhirVersionIndependentConcept(
theCodeSystemUrl, c.getCode(), c.getDisplay(), codeSystemVersionId);
}));

@@ -2865,13 +2908,20 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
// If we don't have a PID, this came from some source other than the JPA
// database, so we don't need to check if it's pre-expanded or not
if (valueSet instanceof IAnyResource) {
Long pid = IDao.RESOURCE_PID.get((IAnyResource) valueSet);
Long pid = IDao.RESOURCE_PID.get(valueSet);
if (pid != null) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
retVal = txTemplate.execute(tx -> {
if (isValueSetPreExpandedForCodeValidation(valueSet)) {
return validateCodeIsInPreExpandedValueSet(
theValidationOptions, valueSet, theCodeSystem, theCode, theDisplay, null, null);
theValidationSupportContext,
theValidationOptions,
valueSet,
theCodeSystem,
theCode,
theDisplay,
null,
null);
} else {
return null;
}

@@ -3212,6 +3262,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {

@Override
public CodeValidationResult validateCodeIsInPreExpandedValueSet(
ValidationSupportContext theValidationSupportContext,
ConceptValidationOptions theOptions,
IBaseResource theValueSet,
String theSystem,

@@ -3226,7 +3277,14 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
myVersionCanonicalizer.codeableConceptToCanonical(theCodeableConcept);

return validateCodeIsInPreExpandedValueSet(
theOptions, valueSetR4, theSystem, theCode, theDisplay, codingR4, codeableConcept);
theValidationSupportContext,
theOptions,
valueSetR4,
theSystem,
theCode,
theDisplay,
codingR4,
codeableConcept);
}

@Override

@@ -3261,7 +3319,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
* Properties returned from method buildSearchScroll
*/
private static final class SearchProperties {
private List<Supplier<SearchScroll<EntityReference>>> mySearchScroll = new ArrayList<>();
private final List<Supplier<SearchScroll<EntityReference>>> mySearchScroll = new ArrayList<>();
private List<String> myIncludeOrExcludeCodes;

public List<Supplier<SearchScroll<EntityReference>>> getSearchScroll() {

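For context, an illustrative sketch (not code from this commit) of the userData caching idiom this class now uses for CS_USERDATA_CURRENT_VERSION and VS_USERDATA_CURRENT_VERSION: compute a value once per cached conformance resource, stash it in the resource's userData map, and synchronize on the resource because the same cached instance may be shared across threads. The key name and helper below are hypothetical.

// Hypothetical key; real code uses CS_USERDATA_CURRENT_VERSION / VS_USERDATA_CURRENT_VERSION.
private static final String USERDATA_KEY = TermReadSvcImpl.class.getName() + "_EXAMPLE";

@SuppressWarnings({"unchecked", "SynchronizationOnLocalVariableOrMethodParameter"})
private <T> T getOrComputeUserData(IBaseResource theResource, java.util.function.Supplier<T> theLoader) {
    synchronized (theResource) {
        // Double-checked inside the lock: only one thread computes and stores the value.
        T value = (T) theResource.getUserData(USERDATA_KEY);
        if (value == null) {
            value = theLoader.get();
            theResource.setUserData(USERDATA_KEY, value);
        }
        return value;
    }
}
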
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.term.api;

import ca.uhn.fhir.context.support.ConceptValidationOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -119,6 +120,7 @@ public interface ITermReadSvc extends IValidationSupport {
*/
@Transactional()
CodeValidationResult validateCodeIsInPreExpandedValueSet(
ValidationSupportContext theValidationSupportContext,
ConceptValidationOptions theOptions,
IBaseResource theValueSet,
String theSystem,

@@ -79,57 +79,6 @@ public class IdHelperServiceTest {
lenient().doReturn(true).when(myStorageSettings).isDeleteEnabled();
}

@Test
public void testResolveResourcePersistentIds() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();

//prepare params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
String resourceType = "Patient";
Long id = 123L;
List<String> ids = List.of(String.valueOf(id));
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();

//prepare results
Patient expectedPatient = new Patient();
expectedPatient.setId(ids.get(0));

// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(true);

final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode));
assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage());
}

@Test
public void testResolveResourcePersistentIdsDeleteFalse() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();

//prepare Params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
Long id = 123L;
String resourceType = "Patient";
List<String> ids = List.of(String.valueOf(id));
String forcedId = "(all)/" + resourceType + "/" + id;
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();

//prepare results
Patient expectedPatient = new Patient();
expectedPatient.setId(ids.get(0));

// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(false);

Map<String, JpaPid> actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode);

//verifyResult
assertFalse(actualIds.isEmpty());
assertNull(actualIds.get(ids.get(0)));
}

@Test
public void testResolveResourceIdentity_defaultFunctionality(){

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -44,7 +44,6 @@ import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.history.Revisions;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

@@ -410,12 +409,6 @@ public class MdmLinkDaoSvc<P extends IResourcePersistentId<?>, M extends IMdmLin
myMdmLinkDao.deleteLinksWithAnyReferenceToPids(theGoldenResourcePids);
}

// TODO: LD: delete for good on the next bump
@Deprecated(since = "6.5.7", forRemoval = true)
public Revisions<Long, M> findMdmLinkHistory(M mdmLink) {
return myMdmLinkDao.findHistory(mdmLink.getId());
}

@Transactional
public List<MdmLinkWithRevision<M>> findMdmLinkHistory(MdmHistorySearchParameters theMdmHistorySearchParameters) {
return myMdmLinkDao.getHistoryForIds(theMdmHistorySearchParameters);

@@ -25,7 +25,6 @@ import ca.uhn.fhir.mdm.util.GoldenResourceHelper;
import ca.uhn.fhir.mdm.util.MdmPartitionHelper;
import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;

@@ -52,9 +51,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

@@ -156,7 +153,7 @@ public class MdmSurvivorshipSvcImplTest {
link.setSourceId(patient.getId());
link.setGoldenResourceId(goldenPatient.getId());
links.add(link);
sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", (Long) link.getSourcePid().getId(), null, new PartitionablePartitionId()));
sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", patient.getIdPart(), (Long) link.getSourcePid().getId(), null, new PartitionablePartitionId()));
}

IFhirResourceDao resourceDao = mock(IFhirResourceDao.class);

@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -26,6 +26,8 @@ import java.util.Date;
public interface IResourceLookup<T extends IResourcePersistentId<?>> {
String getResourceType();

String getFhirId();

/**
 * If the resource is deleted, returns the date/time that the resource was deleted at. Otherwise, returns <code>null</code>
 */

@ -251,6 +251,11 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
|
|||
return myResourceType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFhirId() {
|
||||
return getIdDt().getIdPart();
|
||||
}
|
||||
|
||||
public void setResourceType(String theResourceType) {
|
||||
myResourceType = theResourceType;
|
||||
}
|
||||
|
|
|
@ -988,6 +988,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
* @return the resource id, or null if the resource doesn't have a client-assigned id,
|
||||
* and hasn't been saved to the db to get a server-assigned id yet.
|
||||
*/
|
||||
@Override
|
||||
public String getFhirId() {
|
||||
return myFhirId;
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.8-SNAPSHOT</version>
|
||||
<version>7.7.9-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.8-SNAPSHOT</version>
|
||||
<version>7.7.9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.8-SNAPSHOT</version>
|
||||
<version>7.7.9-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -209,7 +209,7 @@ public class FhirResourceDaoDstu2UpdateTest extends BaseJpaDstu2Test {
|
|||
p2.setId(new IdDt("Organization/" + p1id.getIdPart()));
|
||||
myOrganizationDao.update(p2, mySrd);
|
||||
fail("");
|
||||
} catch (UnprocessableEntityException e) {
|
||||
} catch (InvalidRequestException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
|
|
|
@ -102,7 +102,8 @@ public class FhirResourceDaoValueSetDstu2Test extends BaseJpaDstu2Test {
|
|||
CodeableConceptDt codeableConcept = null;
|
||||
IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage());
|
||||
assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7'");
|
||||
assertThat(result.getMessage()).contains("for in-memory expansion");
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
assertEquals(IValidationSupport.IssueSeverity.WARNING, result.getSeverity());
|
||||
}
|
||||
|
|
|
@@ -2571,32 +2571,6 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
}
}

@Test
public void testUpdateRejectsInvalidTypes() {

Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes");
patient.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes");
IdDt p1id = (IdDt) myClient.create().resource(patient).execute().getId();

Organization org = new Organization();
org.getNameElement().setValue("testUpdateRejectsInvalidTypes");
try {
myClient.update().resource(org).withId("Organization/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]");
}

try {
myClient.update().resource(org).withId("Patient/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]");
}

}

@Test
public void testUpdateResourceConditional() throws IOException {
String methodName = "testUpdateResourceConditional";

@@ -28,6 +28,7 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

@@ -116,6 +117,7 @@ public class SystemProviderTransactionSearchDstu2Test extends BaseJpaDstu2Test {
@Test
public void testBatchWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
ids.sort(Comparator.naturalOrder());

Bundle input = new Bundle();
input.setType(BundleTypeEnum.BATCH);

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.dao.dstu3;

import static org.junit.jupiter.api.Assertions.assertEquals;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;

@@ -33,7 +32,7 @@ import java.util.TimeZone;
import java.util.UUID;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

@@ -41,7 +40,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu3UpdateTest.class);

@AfterEach
public void afterEach(){
public void afterEach() {
myStorageSettings.setResourceServerIdStrategy(JpaStorageSettings.IdStrategyEnum.SEQUENTIAL_NUMERIC);
}

@@ -135,7 +134,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
public void afterResetDao() {
myStorageSettings.setResourceMetaCountHardLimit(new JpaStorageSettings().getResourceMetaCountHardLimit());
}

@Test
public void testHardMetaCapIsEnforcedOnCreate() {
myStorageSettings.setResourceMetaCountHardLimit(3);

@@ -156,7 +155,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
}
}
}

@Test
public void testHardMetaCapIsEnforcedOnMetaAdd() {
myStorageSettings.setResourceMetaCountHardLimit(3);

@@ -167,7 +166,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
patient.setActive(true);
id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
}

{
Meta meta = new Meta();
meta.addTag().setSystem("http://foo").setCode("1");

@@ -183,7 +182,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {

}
}

@Test
public void testDuplicateTagsOnAddTagsIgnored() {
IIdType id;

@@ -198,7 +197,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val2");
meta.addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3");
myPatientDao.metaAddOperation(id, meta, null);

// Do a read
{
Patient patient = myPatientDao.read(id, mySrd);

@@ -228,7 +227,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
patient.getMeta().addTag().setSystem("http://foo").setCode("bar").setDisplay("Val3");
myPatientDao.update(patient, mySrd).getId().toUnqualifiedVersionless();
}

// Do a read on second version
{
Patient patient = myPatientDao.read(id, mySrd);

@@ -314,7 +313,6 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
}


@Test
public void testUpdateByUrl() {
String methodName = "testUpdateByUrl";

@@ -602,8 +600,8 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
p2.setId(new IdType("Organization/" + p1id.getIdPart()));
myOrganizationDao.update(p2, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(930) + "Existing resource ID[Patient/" + p1id.getIdPartAsLong() + "] is of type[Patient] - Cannot update with [Organization]", e.getMessage());
} catch (InvalidRequestException e) {
assertThat(e.getMessage()).contains(Msg.code(960) + "Can not create resource with");
}

try {

@@ -611,9 +609,8 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
myOrganizationDao.update(p2, mySrd);
fail("");
} catch (InvalidRequestException e) {
assertEquals(Msg.code(996) + "Incorrect resource type (Patient) for this DAO, wanted: Organization", e.getMessage());
assertThat(e.getMessage()).contains(Msg.code(996) + "Incorrect resource type");
}

}

@Test

@@ -844,7 +841,7 @@ public class FhirResourceDaoDstu3UpdateTest extends BaseJpaDstu3Test {
// verify
try {
UUID.fromString(result);
} catch (IllegalArgumentException exception){
} catch (IllegalArgumentException exception) {
fail("Result id is not a UUID. Instead, it was: " + result);
}
}
@@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.test.BaseJpaDstu3Test;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;

@@ -10,7 +9,6 @@ import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.test.utilities.ProxyUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.validation.IValidatorModule;
import org.apache.commons.io.IOUtils;

@@ -95,8 +93,6 @@ public class FhirResourceDaoDstu3ValidateTest extends BaseJpaDstu3Test {

ourLog.info("Clearing cache");
myValidationSupport.invalidateCaches();
myFhirInstanceValidator.invalidateCaches();
ProxyUtil.getSingletonTarget(myPersistedResourceValidationSupport, JpaPersistedResourceValidationSupport.class).clearCaches();

MethodOutcome result = myQuestionnaireResponseDao.validate(qr, null, null, null, null, null, null);
OperationOutcome oo = (OperationOutcome) result.getOperationOutcome();

@@ -4277,14 +4277,14 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
try {
myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
} catch (InvalidRequestException e) {
// good
}

try {
myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
} catch (InvalidRequestException e) {
// good
}

@@ -1,8 +1,5 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.rp.dstu3.ObservationResourceProvider;

@@ -15,12 +12,9 @@ import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.test.utilities.JettyUtil;
import com.google.common.base.Charsets;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.hl7.fhir.dstu3.model.Binary;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;

@@ -35,17 +29,20 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;

public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {

private static RestfulServer myRestServer;
private static IGenericClient ourClient;
private static FhirContext ourCtx;
private static CloseableHttpClient ourHttpClient;
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SystemProviderTransactionSearchDstu3Test.class);
private static Server ourServer;
private static String ourServerBase;

@@ -97,14 +94,8 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
int myPort = JettyUtil.getPortForStartedServer(ourServer);
ourServerBase = "http://localhost:" + myPort + "/fhir/context";

PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
HttpClientBuilder builder = HttpClientBuilder.create();
builder.setConnectionManager(connectionManager);
ourHttpClient = builder.build();

ourCtx.getRestfulClientFactory().setSocketTimeout(600 * 1000);
ourClient = ourCtx.newRestfulGenericClient(ourServerBase);
ourClient.setLogRequestAndResponse(true);
myRestServer = restServer;
}

@@ -117,13 +108,13 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {

private List<String> create20Patients() {
List<String> ids = new ArrayList<String>();
List<String> ids = new ArrayList<>();
for (int i = 0; i < 20; i++) {
Patient patient = new Patient();
patient.setGender(AdministrativeGender.MALE);
patient.addIdentifier().setSystem("urn:foo").setValue("A");
patient.addName().setFamily("abcdefghijklmnopqrstuvwxyz".substring(i, i+1));
String id = myPatientDao.create(patient).getId().toUnqualifiedVersionless().getValue();
String id = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless().getValue();
ids.add(id);
}
return ids;

@@ -132,6 +123,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
@Test
public void testBatchWithGetHardLimitLargeSynchronous() {
List<String> ids = create20Patients();
ids.sort(Comparator.naturalOrder());

Bundle input = new Bundle();
input.setType(BundleType.BATCH);

@@ -227,7 +219,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {

Patient newPt = ourClient.read().resource(Patient.class).withId(pid1.getIdPart()).execute();
assertEquals("2", newPt.getIdElement().getVersionIdPart());
assertEquals(false, newPt.getActive());
assertFalse(newPt.getActive());
}

@@ -336,7 +328,7 @@ public class SystemProviderTransactionSearchDstu3Test extends BaseJpaDstu3Test {
}

private List<String> toIds(Bundle theRespBundle) {
ArrayList<String> retVal = new ArrayList<String>();
ArrayList<String> retVal = new ArrayList<>();
for (BundleEntryComponent next : theRespBundle.getEntry()) {
retVal.add(next.getResource().getIdElement().toUnqualifiedVersionless().getValue());
}

@@ -242,7 +242,7 @@ public class TerminologyLoaderSvcIntegrationDstu3Test extends BaseJpaDstu3Test {

IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(new UriType("http://loinc.org/vs"), null, new StringType("10013-1-9999999999"), new StringType(ITermLoaderSvc.LOINC_URI), null, null, null, mySrd);
assertFalse(result.isOk());
assertEquals("Unknown code 'http://loinc.org#10013-1-9999999999' for in-memory expansion of ValueSet 'http://loinc.org/vs'", result.getMessage());
assertThat(result.getMessage()).contains("Unknown code 'http://loinc.org#10013-1-9999999999' for in-memory expansion");
}

private Set<String> toExpandedCodes(ValueSet theExpanded) {

@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@@ -191,8 +191,8 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {

String sql = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false);
assertThat(sql).satisfiesAnyOf(
s -> assertThat(s).contains("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null"),
s -> assertThat(s).contains("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null")
s -> assertThat(s).contains("where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A')"),
s -> assertThat(s).contains("where rt1_0.PARTITION_ID is null and (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B')")
);
assertEquals(50, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());

@@ -57,6 +57,7 @@ import org.mockito.stubbing.Answer;
import org.springframework.context.ApplicationContext;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.Collections;
import java.util.List;

@@ -256,7 +257,13 @@ class BaseHapiFhirResourceDaoTest {
// but for now, Mockito will complain, so we'll leave it out

// test
DaoMethodOutcome outcome = mySvc.delete(id, deleteConflicts, requestDetails, transactionDetails);
DaoMethodOutcome outcome;
try {
TransactionSynchronizationManager.setActualTransactionActive(true);
outcome = mySvc.delete(id, deleteConflicts, requestDetails, transactionDetails);
} finally {
TransactionSynchronizationManager.setActualTransactionActive(false);
}

// verify
assertNotNull(outcome);
@@ -1,119 +0,0 @@
package ca.uhn.fhir.jpa.dao;

import static org.junit.jupiter.api.Assertions.assertNull;
/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2024 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
import ca.uhn.fhir.sl.cache.Cache;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.test.util.ReflectionTestUtils;

import java.util.function.Function;

import static org.assertj.core.api.Assertions.assertThat;
import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LOINC_LOW;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@ExtendWith(MockitoExtension.class)
class JpaPersistedResourceValidationSupportTest {

private FhirContext theFhirContext = FhirContext.forR4();

@Mock private ITermReadSvc myTermReadSvc;
@Mock private DaoRegistry myDaoRegistry;
@Mock private Cache<String, IBaseResource> myLoadCache;
@Mock private IFhirResourceDao<ValueSet> myValueSetResourceDao;

@InjectMocks
private IValidationSupport testedClass =
new JpaPersistedResourceValidationSupport(theFhirContext);

private Class<? extends IBaseResource> myCodeSystemType = CodeSystem.class;
private Class<? extends IBaseResource> myValueSetType = ValueSet.class;

@BeforeEach
public void setup() {
ReflectionTestUtils.setField(testedClass, "myValueSetType", myValueSetType);
}

@Nested
public class FetchCodeSystemTests {

@Test
void fetchCodeSystemMustUseForcedId() {
testedClass.fetchCodeSystem("string-containing-loinc");

verify(myTermReadSvc, times(1)).readCodeSystemByForcedId(LOINC_LOW);
verify(myLoadCache, never()).get(anyString(), isA(Function.class));
}

@Test
void fetchCodeSystemMustNotUseForcedId() {
testedClass.fetchCodeSystem("string-not-containing-l-o-i-n-c");

verify(myTermReadSvc, never()).readCodeSystemByForcedId(LOINC_LOW);
verify(myLoadCache, times(1)).get(anyString(), isA(Function.class));
}

}

@Nested
public class FetchValueSetTests {

@Test
void fetchValueSetMustUseForcedId() {
final String valueSetId = "string-containing-loinc";
assertNull(testedClass.fetchValueSet(valueSetId));
}

@Test
void fetchValueSetMustNotUseForcedId() {
testedClass.fetchValueSet("string-not-containing-l-o-i-n-c");

verify(myLoadCache, times(1)).get(anyString(), isA(Function.class));
}

}

}

@@ -423,8 +423,7 @@ public class FhirResourceDaoR4ComboNonUniqueParamTest extends BaseComboParamsR4T

myCaptureQueriesListener.logSelectQueries();
String expected;
expected = "select rt1_0.RES_ID,rt1_0.RES_TYPE,rt1_0.FHIR_ID from HFJ_RESOURCE rt1_0 where rt1_0.FHIR_ID='my-org'";
assertEquals(expected, myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false));
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false)).contains("where (rt1_0.FHIR_ID='my-org')");
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertThat(sql).contains("SP_VALUE_NORMALIZED LIKE 'FAMILY1%'");
assertThat(sql).contains("t1.TARGET_RESOURCE_ID");
@@ -8,15 +8,19 @@ import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults;
import ca.uhn.fhir.batch2.jobs.reindex.v2.ReindexStepV2;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.ConceptValidationOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;

@@ -35,8 +39,11 @@ import ca.uhn.fhir.jpa.subscription.triggering.SubscriptionTriggeringSvcImpl;
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
import ca.uhn.fhir.jpa.test.util.SubscriptionTestUtil;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.ValidationModeEnum;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;

@@ -65,12 +72,14 @@ import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Coverage;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Encounter;
import org.hl7.fhir.r4.model.Enumerations;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Location;
import org.hl7.fhir.r4.model.Narrative;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.OperationOutcome;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;

@@ -81,6 +90,7 @@ import org.hl7.fhir.r4.model.QuestionnaireResponse;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.Subscription;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.AfterEach;

@@ -92,6 +102,8 @@ import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;

@@ -148,6 +160,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
public static final HashMapResourceProviderExtension<Patient> ourPatientProvider = new HashMapResourceProviderExtension<>(ourServer, Patient.class);
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);
@Autowired
protected SubscriptionTestUtil mySubscriptionTestUtil;
@Autowired
private ISearchParamPresentDao mySearchParamPresentDao;
@Autowired
private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc;

@@ -155,9 +169,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
private ReindexStepV2 myReindexStep;
@Autowired
private DeleteExpungeStep myDeleteExpungeStep;
@Autowired
protected SubscriptionTestUtil mySubscriptionTestUtil;
private ReindexTestHelper myReindexTestHelper;
@Mock
private IJobDataSink<VoidModel> myMockJobDataSinkVoid;
@Mock
private WorkChunk myMockWorkChunk;
@Mock
private IJobDataSink<ReindexResults> myMockJobDataSinkReindexResults;

@AfterEach
public void afterResetDao() {

@@ -314,7 +332,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
}


/*
* See the class javadoc before changing the counts in this test!
*/

@@ -345,7 +362,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertThat(myCaptureQueriesListener.getDeleteQueriesForCurrentThread()).isEmpty();
}


/**
* See the class javadoc before changing the counts in this test!
*/
@@ -642,7 +658,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
cs.addConcept().setCode("bar-1").setDisplay("Bar 1");
cs.addConcept().setCode("bar-2").setDisplay("Bar 2");
myCodeSystemDao.create(cs);
myCodeSystemDao.create(cs, mySrd);
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(cs));

Observation obs = new Observation();

@@ -665,11 +681,11 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
fail(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(e.getOperationOutcome()));
}
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(10, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(12, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
assertEquals(8, myCaptureQueriesListener.getCommitCount());
assertEquals(9, myCaptureQueriesListener.countCommits());

// Validate again (should rely only on caches)
myCaptureQueriesListener.clear();

@@ -682,7 +698,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertThat(myCaptureQueriesListener.getInsertQueriesForCurrentThread()).isEmpty();
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getCommitCount());
assertEquals(0, myCaptureQueriesListener.countCommits());
}

/**

@@ -693,12 +709,12 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
IIdType id = runInTransaction(() -> {
Patient p = new Patient();
p.addIdentifier().setSystem("urn:system").setValue("2");
return myPatientDao.create(p).getId().toUnqualified();
return myPatientDao.create(p, mySrd).getId().toUnqualified();
});

myCaptureQueriesListener.clear();
runInTransaction(() -> {
myPatientDao.read(id.withVersion("1"));
myPatientDao.read(id.withVersion("1"), mySrd);
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(2);

@@ -720,7 +736,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
runInTransaction(() -> {
Patient p = new Patient();
p.getMaritalStatus().setText("123");
return myPatientDao.create(p).getId().toUnqualified();
return myPatientDao.create(p, mySrd).getId().toUnqualified();
});

myCaptureQueriesListener.clear();

@@ -729,7 +745,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
return myPatientDao.update(p).getId().toUnqualified();
return myPatientDao.update(p, mySrd).getId().toUnqualified();
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();

@@ -764,7 +780,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Patient p = new Patient();
p.setUserData("ABAB", "ABAB");
p.getMaritalStatus().setText("123");
return myPatientDao.create(p).getId().toUnqualifiedVersionless();
return myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();

@@ -808,14 +824,14 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Patient p = new Patient();
p.setUserData("ABAB", "ABAB");
p.getMaritalStatus().setText("123");
return myPatientDao.create(p).getId().toUnqualified();
return myPatientDao.create(p, mySrd).getId().toUnqualified();
});

runInTransaction(() -> {
Patient p = new Patient();
p.setId("BBB");
p.getMaritalStatus().setText("123");
myPatientDao.update(p);
myPatientDao.update(p, mySrd);
});

myCaptureQueriesListener.clear();

@@ -824,7 +840,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
myPatientDao.update(p);
myPatientDao.update(p, mySrd);
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();

@@ -919,13 +935,11 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
.map(t -> new TypedPidJson(t.getResourceType(), Long.toString(t.getResourceId())))
.collect(Collectors.toList()));

runInTransaction(()-> assertEquals(10, myResourceTableDao.count()));

IJobDataSink<VoidModel> sink = mock(IJobDataSink.class);
runInTransaction(() -> assertEquals(10, myResourceTableDao.count()));

// Test
myCaptureQueriesListener.clear();
RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids, null), sink, "instance-id", "chunk-id", false, null);
RunOutcome outcome = myDeleteExpungeStep.doDeleteExpunge(new ResourceIdListWorkChunkJson(pids, null), myMockJobDataSinkVoid, "instance-id", "chunk-id", false, null);

// Verify
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());

@@ -933,7 +947,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(29, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(10, outcome.getRecordsProcessed());
runInTransaction(()-> assertEquals(0, myResourceTableDao.count()));
runInTransaction(() -> assertEquals(0, myResourceTableDao.count()));
}

@@ -1128,9 +1142,9 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
params,
data,
instance,
mock(WorkChunk.class)
myMockWorkChunk
);
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class));
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, myMockJobDataSinkReindexResults);

// validate
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount);

@@ -1142,7 +1156,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

@Test
public void testReindexJob_ComboParamIndexesInUse() {
myStorageSettings.setUniqueIndexesEnabled(true);
myStorageSettings.setUniqueIndexesEnabled(true);
myReindexTestHelper.createUniqueCodeSearchParameter();
myReindexTestHelper.createNonUniqueStatusAndCodeSearchParameter();
@@ -1152,38 +1166,38 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
transactionResonse
.getEntry()
.stream()
.map(t->new IdType(t.getResponse().getLocation()))
.forEach(t->data.addTypedPid("Observation", t.getIdPartAsLong()));
.map(t -> new IdType(t.getResponse().getLocation()))
.forEach(t -> data.addTypedPid("Observation", t.getIdPartAsLong()));

runInTransaction(() -> {
assertEquals(24L, myResourceTableDao.count());
assertEquals(20L, myResourceIndexedComboStringUniqueDao.count());
assertEquals(20L, myResourceIndexedComboTokensNonUniqueDao.count());
});
runInTransaction(() -> {
assertEquals(24L, myResourceTableDao.count());
assertEquals(20L, myResourceIndexedComboStringUniqueDao.count());
assertEquals(20L, myResourceIndexedComboTokensNonUniqueDao.count());
});

ReindexJobParameters params = new ReindexJobParameters()
.setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.NONE)
.setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum.ALL)
.setOptimisticLock(false);
ReindexJobParameters params = new ReindexJobParameters()
.setOptimizeStorage(ReindexParameters.OptimizeStorageModeEnum.NONE)
.setReindexSearchParameters(ReindexParameters.ReindexSearchParametersEnum.ALL)
.setOptimisticLock(false);

// execute
myCaptureQueriesListener.clear();
// execute
myCaptureQueriesListener.clear();
JobInstance instance = new JobInstance();
StepExecutionDetails<ReindexJobParameters, ResourceIdListWorkChunkJson> stepExecutionDetails = new StepExecutionDetails<>(
params,
data,
instance,
mock(WorkChunk.class)
myMockWorkChunk
);
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class));
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, myMockJobDataSinkReindexResults);
assertEquals(20, outcome.getRecordsProcessed());

// validate
assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}
// validate
assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

public void assertNoPartitionSelectors() {
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();

@@ -1460,7 +1474,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
}


/**
* See the class javadoc before changing the counts in this test!
*/

@@ -3405,7 +3418,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@Test
public void testTriggerSubscription_Sync() throws Exception {
// Setup
IntStream.range(0, 200).forEach(i->createAPatient());
IntStream.range(0, 200).forEach(i -> createAPatient());

mySubscriptionTestUtil.registerRestHookInterceptor();
ForceOffsetSearchModeInterceptor interceptor = new ForceOffsetSearchModeInterceptor();

@@ -3417,7 +3430,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

waitForActivatedSubscriptionCount(1);

mySubscriptionTriggeringSvc.triggerSubscription(null, List.of(new StringType("Patient?")), subscriptionId, mySrd);
mySubscriptionTriggeringSvc.triggerSubscription(null, List.of(new StringType("Patient?")), subscriptionId, mySrd);

// Test
myCaptureQueriesListener.clear();

@@ -3443,7 +3456,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@Test
public void testTriggerSubscription_Async() throws Exception {
// Setup
IntStream.range(0, 200).forEach(i->createAPatient());
IntStream.range(0, 200).forEach(i -> createAPatient());

mySubscriptionTestUtil.registerRestHookInterceptor();

@@ -3804,7 +3817,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
runInTransaction(()->{
runInTransaction(() -> {
ResourceTable version = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow();
assertFalse(version.isParamsTokenPopulated());
assertFalse(version.isHasLinks());

@@ -3825,7 +3838,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Observation observation = new Observation().setStatus(Observation.ObservationStatus.FINAL).addCategory(new CodeableConcept().addCoding(new Coding("http://category-type", "12345", null))).setCode(new CodeableConcept().addCoding(new Coding("http://coverage-type", "12345", null)));

IIdType idDt = myObservationDao.create(observation, mySrd).getEntity().getIdDt();
runInTransaction(()->{
runInTransaction(() -> {
assertEquals(4, myResourceIndexedSearchParamTokenDao.count());
ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow();
assertTrue(version.isParamsTokenPopulated());
@@ -3837,13 +3850,217 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test

// then
assertQueryCount(3, 1, 1, 2);
runInTransaction(()->{
runInTransaction(() -> {
assertEquals(0, myResourceIndexedSearchParamTokenDao.count());
ResourceTable version = myResourceTableDao.findById(idDt.getIdPartAsLong()).orElseThrow();
assertFalse(version.isParamsTokenPopulated());
});
}

@Test
public void testFetchStructureDefinition_BuiltIn() {

// First pass with an empty cache
myValidationSupport.invalidateCaches();
myCaptureQueriesListener.clear();
assertNotNull(myValidationSupport.fetchStructureDefinition("http://hl7.org/fhir/StructureDefinition/Patient"));

assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());

// Again (should use cache)
myCaptureQueriesListener.clear();
assertNotNull(myValidationSupport.fetchStructureDefinition("http://hl7.org/fhir/StructureDefinition/Patient"));

assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
}

@Test
public void testFetchStructureDefinition_StoredInRepository() {

StructureDefinition sd = new StructureDefinition();
sd.setUrl("http://foo");
myStructureDefinitionDao.create(sd, mySrd);

// First pass with an empty cache
myValidationSupport.invalidateCaches();
myCaptureQueriesListener.clear();
assertNotNull(myValidationSupport.fetchStructureDefinition("http://foo"));

assertEquals(1, myCaptureQueriesListener.countGetConnections());
assertEquals(2, myCaptureQueriesListener.countSelectQueries());

// Again (should use cache)
myCaptureQueriesListener.clear();
assertNotNull(myValidationSupport.fetchStructureDefinition("http://foo"));

assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
}

@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testValidateResource(boolean theStoredInRepository) {
Patient resource = new Patient();
resource.setGender(Enumerations.AdministrativeGender.MALE);
resource.getText().setStatus(Narrative.NarrativeStatus.GENERATED).setDivAsString("<div>hello</div>");
String encoded;

IIdType id = null;
int initialAdditionalSelects = 0;
if (theStoredInRepository) {
id = myPatientDao.create(resource, mySrd).getId();
resource = null;
encoded = null;
initialAdditionalSelects = 1;
} else {
resource.setId("A");
encoded = myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(resource);
}

myCaptureQueriesListener.clear();
ValidationModeEnum mode = ValidationModeEnum.UPDATE;
MethodOutcome outcome = myPatientDao.validate(resource, id, encoded, EncodingEnum.JSON, mode, null, mySrd);
assertThat(((OperationOutcome)outcome.getOperationOutcome()).getIssueFirstRep().getDiagnostics()).contains("No issues detected");
myCaptureQueriesListener.logSelectQueries();
if (theStoredInRepository) {
assertEquals(7, myCaptureQueriesListener.countGetConnections());
assertEquals(8, myCaptureQueriesListener.countSelectQueries());
} else {
assertEquals(6, myCaptureQueriesListener.countGetConnections());
assertEquals(6, myCaptureQueriesListener.countSelectQueries());
}

// Again (should use caches)
myCaptureQueriesListener.clear();
outcome = myPatientDao.validate(resource, id, encoded, EncodingEnum.JSON, mode, null, mySrd);
assertThat(((OperationOutcome)outcome.getOperationOutcome()).getIssueFirstRep().getDiagnostics()).contains("No issues detected");
if (theStoredInRepository) {
assertEquals(1, myCaptureQueriesListener.countGetConnections());
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
} else {
assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
}
}

@Test
public void testValidateCode_BuiltIn() {

// First pass with an empty cache
myValidationSupport.invalidateCaches();
myCaptureQueriesListener.clear();
ValidationSupportContext ctx = new ValidationSupportContext(myValidationSupport);
ConceptValidationOptions options = new ConceptValidationOptions();
String vsUrl = "http://hl7.org/fhir/ValueSet/marital-status";
String csUrl = "http://terminology.hl7.org/CodeSystem/v3-MaritalStatus";
String code = "I";
String code2 = "A";
assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl).isOk());

assertEquals(1, myCaptureQueriesListener.countGetConnections());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());

// Again (should use cache)
myCaptureQueriesListener.clear();
assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl).isOk());
assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());

// Different code (should use cache)
myCaptureQueriesListener.clear();
assertTrue(myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl).isOk());
assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
}

@Test
public void testValidateCode_StoredInRepository() {
String vsUrl = "http://vs";
String csUrl = "http://cs";
String code = "A";
String code2 = "B";

CodeSystem cs = new CodeSystem();
cs.setUrl(csUrl);
cs.setStatus(Enumerations.PublicationStatus.ACTIVE);
cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
cs.addConcept().setCode(code);
cs.addConcept().setCode(code2);
myCodeSystemDao.create(cs, mySrd);

ValueSet vs = new ValueSet();
vs.setUrl(vsUrl);
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
vs.getCompose().addInclude().setSystem(csUrl);
myValueSetDao.create(vs, mySrd);
IValidationSupport.CodeValidationResult result;

// First pass with an empty cache
myValidationSupport.invalidateCaches();
myCaptureQueriesListener.clear();
ValidationSupportContext ctx = new ValidationSupportContext(myValidationSupport);
ConceptValidationOptions options = new ConceptValidationOptions();
result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertThat(result.getMessage()).isNull();

assertEquals(4, myCaptureQueriesListener.countGetConnections());
assertEquals(8, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logSelectQueries();

// Again (should use cache)
myCaptureQueriesListener.clear();
result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertThat(result.getMessage()).isNull();
assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());

// Different code (should use cache)
myCaptureQueriesListener.clear();
result = myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertEquals(1, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());

// Now pre-expand the VS and try again (should use disk because we're fetching from pre-expansion)
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
myCaptureQueriesListener.clear();
result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertThat(result.getMessage()).contains("expansion that was pre-calculated");
assertEquals(3, myCaptureQueriesListener.countGetConnections());
assertEquals(7, myCaptureQueriesListener.countSelectQueries());

// Same code (should use cache)
myCaptureQueriesListener.clear();
result = myValidationSupport.validateCode(ctx, options, csUrl, code, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertThat(result.getMessage()).contains("expansion that was pre-calculated");
assertEquals(0, myCaptureQueriesListener.countGetConnections());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());

// Different code (should use cache)
myCaptureQueriesListener.clear();
result = myValidationSupport.validateCode(ctx, options, csUrl, code2, null, vsUrl);
assertNotNull(result);
assertTrue(result.isOk());
assertThat(result.getMessage()).contains("expansion that was pre-calculated");
assertEquals(1, myCaptureQueriesListener.countGetConnections());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());

}

private void assertQueryCount(int theExpectedSelectCount, int theExpectedUpdateCount, int theExpectedInsertCount, int theExpectedDeleteCount) {

assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount);
@ -828,7 +828,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
|
|||
|
||||
String resultingQueryNotFormatted = queries.get(0);
|
||||
assertThat(StringUtils.countMatches(resultingQueryNotFormatted, "Patient.managingOrganization")).as(resultingQueryNotFormatted).isEqualTo(1);
|
||||
assertThat(resultingQueryNotFormatted).contains("TARGET_RESOURCE_ID IN ('" + ids.get(0) + "','" + ids.get(1) + "','" + ids.get(2) + "','" + ids.get(3) + "','" + ids.get(4) + "')");
|
||||
assertThat(resultingQueryNotFormatted).matches("^SELECT .* WHERE .*TARGET_RESOURCE_ID IN \\(.*\\)$");
|
||||
|
||||
// Ensure that the search actually worked
|
||||
assertEquals(5, search.size().intValue());
|
||||
|
@@ -891,7 +891,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(selectQuery).contains("where rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B'");
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B')");
selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')");
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
@@ -910,7 +910,10 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertThat(outcome.getResources(0, 999)).hasSize(2);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_ID='" + obs2id + "')");
assertThat(selectQuery).containsAnyOf(
"where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='" + obs2id + "')",
"where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='" + obs2id + "' or rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')"
);
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
}
@@ -996,7 +996,7 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
p2.setId(new IdType("Organization/" + p1id.getIdPart()));
myOrganizationDao.update(p2, mySrd);
fail();
} catch (UnprocessableEntityException e) {
} catch (InvalidRequestException e) {
// good
}
@@ -35,6 +35,7 @@ import org.junit.jupiter.api.Test;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.util.stream.Collectors;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
@@ -150,7 +151,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
outcome = myValidationSupport.validateCode(ctx, options, "http://cs", "childX", null, "http://vs");
assertNotNull(outcome);
assertFalse(outcome.isOk());
assertEquals("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage());
assertThat(outcome.getMessage()).contains("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'");

// In memory - Enumerated in non-present CS
@@ -162,7 +163,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
outcome = myValidationSupport.validateCode(ctx, options, "http://cs-np", "codeX", null, "http://vs");
assertNotNull(outcome);
assertFalse(outcome.isOk());
assertEquals("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage());
assertThat(outcome.getMessage()).contains("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'");

// Precalculated
@@ -253,7 +254,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
outcome = myValidationSupport.validateCode(ctx, options, "http://cs", "childX", null, "http://vs");
assertNotNull(outcome);
assertFalse(outcome.isOk());
assertEquals("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage());
assertThat(outcome.getMessage()).contains("Unknown code 'http://cs#childX' for in-memory expansion of ValueSet 'http://vs'");

// In memory - Enumerated in non-present CS
@@ -265,7 +266,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
outcome = myValidationSupport.validateCode(ctx, options, "http://cs-np", "codeX", null, "http://vs");
assertNotNull(outcome);
assertFalse(outcome.isOk());
assertEquals("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'", outcome.getMessage());
assertThat(outcome.getMessage()).contains("Unknown code 'http://cs-np#codeX' for in-memory expansion of ValueSet 'http://vs'");

// Precalculated
@@ -343,7 +344,7 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd);
assertTrue(result.isOk());
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage());
assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet 'http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2'");
}

@Test
@@ -551,9 +552,16 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
vsInclude.addConcept().setCode("28571000087109").setDisplay("MODERNA COVID-19 mRNA-1273");
myValueSetDao.update(vs);

TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true);
myTerminologyDeferredStorageSvc.saveAllDeferred();
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();

myCaptureQueriesListener.clear();;
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, new IdType("ValueSet/vaccinecode"), new CodeType("28571000087109"), new CodeType("http://snomed.info/sct"), null, null, null, mySrd);
assertTrue(outcome.isOk());
myCaptureQueriesListener.logSelectQueries();
assertEquals(9, myCaptureQueriesListener.countSelectQueries(), ()->myCaptureQueriesListener.getSelectQueries().stream().map(t->t.getSql(true, false)).collect(Collectors.joining("\n")));
assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated");
assertTrue(outcome.isOk(), outcome.getMessage());
outcome = myTermSvc.validateCodeInValueSet(
new ValidationSupportContext(myValidationSupport),
new ConceptValidationOptions(),
@@ -563,10 +571,6 @@ public class FhirResourceDaoR4ValueSetTest extends BaseJpaR4Test {
vs
);
assertTrue(outcome.isOk());

ValueSet expansion = myValueSetDao.expand(new IdType("ValueSet/vaccinecode"), new ValueSetExpansionOptions(), mySrd);
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion));

}

/** See #4449 */
@@ -257,7 +257,7 @@ public class RemoteTerminologyServiceJpaR4Test extends BaseJpaR4Test {

// Verify 1
myCaptureQueriesListener.logSelectQueries();
Assertions.assertEquals(4, myCaptureQueriesListener.countGetConnections());
Assertions.assertEquals(3, myCaptureQueriesListener.countGetConnections());
assertThat(ourValueSetProvider.mySearchUrls).asList().isEmpty();
assertThat(ourCodeSystemProvider.mySearchUrls).asList().isEmpty();
@ -1,10 +1,11 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.IDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
|
@ -18,20 +19,17 @@ import org.hl7.fhir.r4.model.Patient;
|
|||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(HookInterceptorR4Test.class);
|
||||
|
||||
@Autowired
|
||||
IIdHelperService<JpaPid> myIdHelperService;
|
||||
|
@ -116,8 +114,8 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId();
|
||||
|
||||
runInTransaction(() -> {
|
||||
List<JpaPid> pids = myIdHelperService.resolveResourcePersistentIdsWithCache(null,
|
||||
Collections.singletonList(savedPatientId));
|
||||
List<JpaPid> pids = myIdHelperService.resolveResourcePids(RequestPartitionId.allPartitions(),
|
||||
Collections.singletonList(savedPatientId), ResolveIdentityMode.includeDeleted().cacheOk());
|
||||
Long savedPatientPid = pids.get(0).getId();
|
||||
assertEquals(savedPatientPid.longValue(), pid.get());
|
||||
});
|
||||
|
@ -133,7 +131,9 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
pid.set(resourcePid);
|
||||
});
|
||||
IIdType savedPatientId = myClient.create().resource(new Patient()).execute().getId();
|
||||
Long savedPatientPid = runInTransaction(() -> myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getId());
|
||||
Long savedPatientPid = runInTransaction(() ->
|
||||
myIdHelperService.resolveResourceIdentityPid(RequestPartitionId.allPartitions(), savedPatientId.getResourceType(), savedPatientId.getIdPart(), ResolveIdentityMode.includeDeleted().cacheOk()).getId()
|
||||
);
|
||||
|
||||
myClient.delete().resourceById(savedPatientId).execute();
|
||||
Parameters parameters = new Parameters();
|
||||
|
@ -170,7 +170,9 @@ public class HookInterceptorR4Test extends BaseResourceProviderR4Test {
|
|||
patient.setActive(true);
|
||||
myClient.update().resource(patient).execute();
|
||||
runInTransaction(() -> {
|
||||
Long savedPatientPid = myIdHelperService.resolveResourcePersistentIdsWithCache(null, Collections.singletonList(savedPatientId)).get(0).getId();
|
||||
Long savedPatientPid = runInTransaction(() ->
|
||||
myIdHelperService.resolveResourceIdentityPid(RequestPartitionId.allPartitions(), savedPatientId.getResourceType(), savedPatientId.getIdPart(), ResolveIdentityMode.includeDeleted().cacheOk()).getId()
|
||||
);
|
||||
assertEquals(savedPatientPid.longValue(), pidOld.get());
|
||||
assertEquals(savedPatientPid.longValue(), pidNew.get());
|
||||
});
|
||||
|
|
|
@ -5810,14 +5810,14 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
|
|||
try {
|
||||
myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute();
|
||||
fail();
|
||||
} catch (UnprocessableEntityException e) {
|
||||
} catch (InvalidRequestException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
try {
|
||||
myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute();
|
||||
fail();
|
||||
} catch (UnprocessableEntityException e) {
|
||||
} catch (InvalidRequestException e) {
|
||||
// good
|
||||
}
|
||||
|
||||
|
|
|
@ -1069,9 +1069,10 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
|
|||
.withParameter(Parameters.class, "url", new UrlType(URL_MY_VALUE_SET))
|
||||
.returnResourceType(ValueSet.class)
|
||||
.execute();
|
||||
myCaptureQueriesListener.logSelectQueries();
|
||||
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion));
|
||||
assertThat(toDirectCodes(expansion.getExpansion().getContains())).containsExactlyInAnyOrder("A", "AA", "AB", "AAA");
|
||||
assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(14);
|
||||
assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(18);
|
||||
assertEquals("ValueSet \"ValueSet.url[http://example.com/my_value_set]\" has not yet been pre-expanded. Performing in-memory expansion without parameters. Current status: NOT_EXPANDED | The ValueSet is waiting to be picked up and pre-expanded by a scheduled task.", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE));
|
||||
|
||||
// Hierarchical
|
||||
|
@ -1111,7 +1112,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
|
|||
.execute();
|
||||
ourLog.debug(myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(expansion));
|
||||
assertThat(toDirectCodes(expansion.getExpansion().getContains())).containsExactlyInAnyOrder("A", "AA", "AB", "AAA");
|
||||
assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(10);
|
||||
assertThat(myCaptureQueriesListener.getSelectQueries().size()).as(() -> myCaptureQueriesListener.logSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.joining("\n * "))).isEqualTo(12);
|
||||
assertEquals("ValueSet with URL \"Unidentified ValueSet\" was expanded using an in-memory expansion", expansion.getMeta().getExtensionString(EXT_VALUESET_EXPANSION_MESSAGE));
|
||||
|
||||
// Hierarchical
|
||||
|
|
|
@ -43,6 +43,7 @@ import org.junit.jupiter.api.BeforeEach;
|
|||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -146,6 +147,7 @@ public class SystemProviderTransactionSearchR4Test extends BaseJpaR4Test {
|
|||
@Test
|
||||
public void testBatchWithGetHardLimitLargeSynchronous() {
|
||||
List<String> ids = create20Patients();
|
||||
ids.sort(Comparator.naturalOrder());
|
||||
|
||||
Bundle input = new Bundle();
|
||||
input.setType(BundleType.BATCH);
|
||||
|
|
|
@ -1,15 +1,9 @@
|
|||
package ca.uhn.fhir.jpa.subscription.resthook;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test;
|
||||
import ca.uhn.fhir.jpa.model.config.SubscriptionSettings;
|
||||
import ca.uhn.fhir.jpa.subscription.submit.svc.ResourceModifiedSubmitterSvc;
|
||||
import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test;
|
||||
import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber;
|
||||
import ca.uhn.fhir.jpa.topic.SubscriptionTopicDispatcher;
|
||||
import ca.uhn.fhir.jpa.topic.SubscriptionTopicRegistry;
|
||||
|
@ -49,6 +43,7 @@ import org.slf4j.LoggerFactory;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
@ -57,9 +52,15 @@ import java.util.concurrent.TimeUnit;
|
|||
import static ca.uhn.fhir.rest.api.Constants.CT_FHIR_JSON_NEW;
|
||||
import static ca.uhn.fhir.util.HapiExtensions.EX_SEND_DELETE_MESSAGES;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
|
||||
import static org.awaitility.Awaitility.await;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
/**
|
||||
* Test the rest-hook subscriptions
|
||||
|
@ -69,9 +70,6 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
public static final String TEST_PATIENT_ID = "topic-test-patient-id";
|
||||
public static final String PATIENT_REFERENCE = "Patient/" + TEST_PATIENT_ID;
|
||||
|
||||
@Autowired
|
||||
ResourceModifiedSubmitterSvc myResourceModifiedSubmitterSvc;
|
||||
|
||||
@Autowired
|
||||
StoppableSubscriptionDeliveringRestHookSubscriber myStoppableSubscriptionDeliveringRestHookSubscriber;
|
||||
@Autowired(required = false)
|
||||
|
@ -158,7 +156,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
*/
|
||||
|
||||
Observation obs = sendObservation(code, "SNOMED-CT", "http://source-system.com", null);
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless());
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd);
|
||||
|
||||
// Should see 1 subscription notification
|
||||
waitForQueueToDrain();
|
||||
|
@ -178,8 +176,8 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
|
||||
obs.getIdentifierFirstRep().setSystem("foo").setValue("2");
|
||||
obs.getMeta().setSource("http://other-source");
|
||||
myObservationDao.update(obs);
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless());
|
||||
myObservationDao.update(obs, mySrd);
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd);
|
||||
|
||||
// Should see 1 subscription notification
|
||||
waitForQueueToDrain();
|
||||
|
@ -250,7 +248,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
bundle.addEntry().setResource(observation).getRequest().setMethod(Bundle.HTTPVerb.POST).setUrl("Observation");
|
||||
Bundle responseBundle = mySystemDao.transaction(null, bundle);
|
||||
|
||||
Observation obs = myObservationDao.read(new IdType(responseBundle.getEntry().get(0).getResponse().getLocation()));
|
||||
Observation obs = myObservationDao.read(new IdType(responseBundle.getEntry().get(0).getResponse().getLocation()), mySrd);
|
||||
|
||||
// Should see 1 subscription notification
|
||||
waitForQueueToDrain();
|
||||
|
@ -275,7 +273,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
bundle.setType(Bundle.BundleType.TRANSACTION);
|
||||
bundle.addEntry().setResource(observation).getRequest().setMethod(Bundle.HTTPVerb.PUT).setUrl(obs.getIdElement().toUnqualifiedVersionless().getValue());
|
||||
mySystemDao.transaction(null, bundle);
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless());
|
||||
obs = myObservationDao.read(obs.getIdElement().toUnqualifiedVersionless(), mySrd);
|
||||
|
||||
// Should see 1 subscription notification
|
||||
waitForQueueToDrain();
|
||||
|
@ -303,7 +301,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
observation.getIdentifierFirstRep().setSystem("foo").setValue("ID" + i);
|
||||
observation.getCode().addCoding().setCode(code).setSystem("SNOMED-CT");
|
||||
observation.setStatus(Observation.ObservationStatus.FINAL);
|
||||
myObservationDao.create(observation);
|
||||
myObservationDao.create(observation, mySrd);
|
||||
}
|
||||
|
||||
ourObservationProvider.waitForUpdateCount(100);
|
||||
|
@ -332,7 +330,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
observation.getIdentifierFirstRep().setSystem("foo").setValue("ID");
|
||||
observation.getCode().addCoding().setCode(code).setSystem("SNOMED-CT");
|
||||
observation.setStatus(Observation.ObservationStatus.FINAL);
|
||||
myObservationDao.create(observation);
|
||||
myObservationDao.create(observation, mySrd);
|
||||
|
||||
ourObservationProvider.waitForUpdateCount(1);
|
||||
}
|
||||
|
@ -376,7 +374,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
obs.setId(obs.getIdElement().toUnqualifiedVersionless());
|
||||
myClient.meta().add().onResource(obs.getIdElement()).meta(new Meta().addTag("http://blah", "blah", null)).execute();
|
||||
|
||||
obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toUnqualifiedVersionless()).execute();
|
||||
obs = myClient.read().resource(Observation.class).withId(obs.getIdElement().toVersionless()).execute();
|
||||
Coding tag = obs.getMeta().getTag("http://blah", "blah");
|
||||
assertNotNull(tag);
|
||||
|
||||
|
@ -569,8 +567,8 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(2);
|
||||
|
||||
Observation observation1 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("1")).findFirst().orElseThrow(()->new IllegalArgumentException());
|
||||
Observation observation2 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("2")).findFirst().orElseThrow(()->new IllegalArgumentException());
|
||||
Observation observation1 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("1")).findFirst().orElseThrow(IllegalArgumentException::new);
|
||||
Observation observation2 = ourObservationProvider.getResourceUpdates().stream().filter(t->t.getIdElement().getVersionIdPart().equals("2")).findFirst().orElseThrow(IllegalArgumentException::new);
|
||||
|
||||
assertEquals("1", observation1.getIdElement().getVersionIdPart());
|
||||
assertNull(observation1.getNoteFirstRep().getText());
|
||||
|
@ -598,7 +596,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
ourObservationProvider.waitForUpdateCount(1);
|
||||
|
||||
ourLog.info("** About to delete observation");
|
||||
myObservationDao.delete(IdDt.of(observation).toUnqualifiedVersionless());
|
||||
myObservationDao.delete(Objects.requireNonNull(IdDt.of(observation)).toUnqualifiedVersionless(), mySrd);
|
||||
ourObservationProvider.waitForDeleteCount(1);
|
||||
}
|
||||
|
||||
|
@ -674,7 +672,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
|
||||
assertEquals("1", ourObservationProvider.getStoredResources().get(0).getIdElement().getVersionIdPart());
|
||||
|
||||
Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId());
|
||||
Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute();
|
||||
assertNotNull(subscriptionTemp);
|
||||
|
||||
subscriptionTemp.setCriteria(criteria1);
|
||||
|
@ -698,7 +696,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3 = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
observation3.setCode(codeableConcept);
|
||||
Coding coding = codeableConcept.addCoding();
|
||||
|
@ -711,21 +709,21 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3a = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
|
||||
CodeableConcept codeableConcept1 = new CodeableConcept();
|
||||
observation3a.setCode(codeableConcept1);
|
||||
Coding coding1 = codeableConcept1.addCoding();
|
||||
coding1.setCode(code);
|
||||
coding1.setSystem("SNOMED-CT");
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute();
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement().toUnqualifiedVersionless()).execute();
|
||||
|
||||
// Should see only one subscription notification
|
||||
waitForQueueToDrain();
|
||||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(5);
|
||||
|
||||
assertFalse(subscription1.getId().equals(subscription2.getId()));
|
||||
assertNotEquals(subscription1.getId(), subscription2.getId());
|
||||
assertThat(observation1.getId()).isNotEmpty();
|
||||
assertThat(observation2.getId()).isNotEmpty();
|
||||
}
|
||||
|
@ -754,7 +752,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
|
||||
assertEquals("1", ourObservationProvider.getStoredResources().get(0).getIdElement().getVersionIdPart());
|
||||
|
||||
Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId());
|
||||
Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute();
|
||||
assertNotNull(subscriptionTemp);
|
||||
|
||||
subscriptionTemp.setCriteria(criteria1);
|
||||
|
@ -778,7 +776,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3 = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
observation3.setCode(codeableConcept);
|
||||
Coding coding = codeableConcept.addCoding();
|
||||
|
@ -791,21 +789,21 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3a = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
|
||||
CodeableConcept codeableConcept1 = new CodeableConcept();
|
||||
observation3a.setCode(codeableConcept1);
|
||||
Coding coding1 = codeableConcept1.addCoding();
|
||||
coding1.setCode(code);
|
||||
coding1.setSystem("SNOMED-CT");
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute();
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement().toVersionless()).execute();
|
||||
|
||||
// Should see only one subscription notification
|
||||
waitForQueueToDrain();
|
||||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(5);
|
||||
|
||||
assertFalse(subscription1.getId().equals(subscription2.getId()));
|
||||
assertNotEquals(subscription1.getId(), subscription2.getId());
|
||||
assertThat(observation1.getId()).isNotEmpty();
|
||||
assertThat(observation2.getId()).isNotEmpty();
|
||||
}
|
||||
|
@ -830,7 +828,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
ourObservationProvider.waitForUpdateCount(1);
|
||||
assertEquals(Constants.CT_FHIR_XML_NEW, ourRestfulServer.getRequestContentTypes().get(0));
|
||||
|
||||
Subscription subscriptionTemp = myClient.read(Subscription.class, subscription2.getId());
|
||||
Subscription subscriptionTemp = myClient.read().resource(Subscription.class).withId(subscription2.getId()).execute();
|
||||
assertNotNull(subscriptionTemp);
|
||||
subscriptionTemp.setCriteria(criteria1);
|
||||
myClient.update().resource(subscriptionTemp).withId(subscriptionTemp.getIdElement()).execute();
|
||||
|
@ -852,7 +850,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3 = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3 = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
observation3.setCode(codeableConcept);
|
||||
Coding coding = codeableConcept.addCoding();
|
||||
|
@ -865,21 +863,21 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(4);
|
||||
|
||||
Observation observation3a = myClient.read(Observation.class, observationTemp3.getId());
|
||||
Observation observation3a = myClient.read().resource(Observation.class).withId(observationTemp3.getId()).execute();
|
||||
|
||||
CodeableConcept codeableConcept1 = new CodeableConcept();
|
||||
observation3a.setCode(codeableConcept1);
|
||||
Coding coding1 = codeableConcept1.addCoding();
|
||||
coding1.setCode(code);
|
||||
coding1.setSystem("SNOMED-CT");
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement()).execute();
|
||||
myClient.update().resource(observation3a).withId(observation3a.getIdElement().toVersionless()).execute();
|
||||
|
||||
// Should see only one subscription notification
|
||||
waitForQueueToDrain();
|
||||
assertEquals(0, ourObservationProvider.getCountCreate());
|
||||
ourObservationProvider.waitForUpdateCount(5);
|
||||
|
||||
assertFalse(subscription1.getId().equals(subscription2.getId()));
|
||||
assertNotEquals(subscription1.getId(), subscription2.getId());
|
||||
assertThat(observation1.getId()).isNotEmpty();
|
||||
assertThat(observation2.getId()).isNotEmpty();
|
||||
}
|
||||
|
@ -1002,7 +1000,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
|
||||
ourLog.info("** About to send observation that wont match");
|
||||
|
||||
Observation observation1 = sendObservation(code, "SNOMED-CT");
|
||||
sendObservation(code, "SNOMED-CT");
|
||||
|
||||
// Criteria didn't match, shouldn't see any updates
|
||||
waitForQueueToDrain();
|
||||
|
@ -1018,7 +1016,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
waitForQueueToDrain();
|
||||
|
||||
ourLog.info("** About to send Observation 2");
|
||||
Observation observation2 = sendObservation(code, "SNOMED-CT");
|
||||
sendObservation(code, "SNOMED-CT");
|
||||
waitForQueueToDrain();
|
||||
|
||||
// Should see a subscription notification this time
|
||||
|
@ -1027,7 +1025,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
|
||||
myClient.delete().resourceById(new IdType("Subscription/" + subscription2.getId())).execute();
|
||||
|
||||
Observation observationTemp3 = sendObservation(code, "SNOMED-CT");
|
||||
sendObservation(code, "SNOMED-CT");
|
||||
|
||||
// No more matches
|
||||
Thread.sleep(1000);
|
||||
|
@ -1042,11 +1040,11 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
String criteria1 = "Observation?code=SNOMED-CT|" + code + "&_format=xml";
|
||||
String criteria2 = "Observation?code=SNOMED-CT|" + code + "111&_format=xml";
|
||||
|
||||
Subscription subscription1 = createSubscription(criteria1, payload);
|
||||
Subscription subscription2 = createSubscription(criteria2, payload);
|
||||
createSubscription(criteria1, payload);
|
||||
createSubscription(criteria2, payload);
|
||||
waitForActivatedSubscriptionCount(2);
|
||||
|
||||
Observation observation1 = sendObservation(code, "SNOMED-CT");
|
||||
sendObservation(code, "SNOMED-CT");
|
||||
|
||||
// Should see 1 subscription notification
|
||||
waitForQueueToDrain();
|
||||
|
@ -1056,7 +1054,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
}
|
||||
|
||||
@Test
|
||||
public void testRestHookSubscriptionInvalidCriteria() throws Exception {
|
||||
public void testRestHookSubscriptionInvalidCriteria() {
|
||||
String payload = "application/xml";
|
||||
|
||||
String criteria1 = "Observation?codeeeee=SNOMED-CT";
|
||||
|
@ -1235,7 +1233,7 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
|
|||
sp.setExpression("Observation.extension('Observation#accessType')");
|
||||
sp.setXpathUsage(SearchParameter.XPathUsageType.NORMAL);
|
||||
sp.setStatus(Enumerations.PublicationStatus.ACTIVE);
|
||||
mySearchParameterDao.create(sp);
|
||||
mySearchParameterDao.create(sp,mySrd);
|
||||
mySearchParamRegistry.forceRefresh();
|
||||
createSubscription(criteria, "application/json");
|
||||
waitForActivatedSubscriptionCount(1);
|
||||
|
File diff suppressed because it is too large
@ -270,37 +270,38 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {
|
|||
ourLog.debug("ValueSet:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));
|
||||
|
||||
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
|
||||
ValidationSupportContext valCtx = new ValidationSupportContext(myValidationSupport);
|
||||
|
||||
IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, null);
|
||||
IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, null);
|
||||
assertNull(result);
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "BOGUS", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "BOGUS", null, null, null);
|
||||
assertFalse(result.isOk());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "11378-7", null, null, null);
|
||||
assertFalse(result.isOk());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", null, null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter");
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, coding, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, coding, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS"));
|
||||
codeableConcept.addCoding(coding);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, codeableConcept);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, codeableConcept);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
}
|
||||
|
@ -318,38 +319,39 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {
|
|||
ourLog.debug("ValueSet:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet));
|
||||
|
||||
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
|
||||
ValidationSupportContext valCtx = new ValidationSupportContext(myValidationSupport);
|
||||
|
||||
IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, null);
|
||||
IValidationSupport.CodeValidationResult result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, null);
|
||||
assertNull(result);
|
||||
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "BOGUS", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "BOGUS", null, null, null);
|
||||
assertFalse(result.isOk());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, "11378-7", null, null, null);
|
||||
assertFalse(result.isOk());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", null, null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsGuess, valueSet, null, "11378-7", "Systolic blood pressure at First encounter", null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, "http://acme.org", "11378-7", null, null, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
Coding coding = new Coding("http://acme.org", "11378-7", "Systolic blood pressure at First encounter");
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, coding, null);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, coding, null);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
codeableConcept.addCoding(new Coding("BOGUS", "BOGUS", "BOGUS"));
|
||||
codeableConcept.addCoding(coding);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(optsNoGuess, valueSet, null, null, null, null, codeableConcept);
|
||||
result = myTermSvc.validateCodeIsInPreExpandedValueSet(valCtx, optsNoGuess, valueSet, null, null, null, null, codeableConcept);
|
||||
assertTrue(result.isOk());
|
||||
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
|
||||
}
|
||||
|
|
|
@ -88,6 +88,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
@AfterEach
|
||||
public void afterEach() {
|
||||
SearchBuilder.setMaxPageSizeForTest(null);
|
||||
TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -502,7 +503,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
code = "AAA";
|
||||
outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd);
|
||||
assertFalse(outcome.isOk());
|
||||
assertEquals("Unknown code 'http://hl7.org/fhir/administrative-gender#AAA' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/administrative-gender'", outcome.getMessage());
|
||||
assertThat(outcome.getMessage()).contains("Unknown code 'http://hl7.org/fhir/administrative-gender#AAA' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/administrative-gender'");
|
||||
assertEquals("error", outcome.getSeverityCode());
|
||||
|
||||
}
|
||||
|
@ -986,7 +987,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
String code = "28571000087109";
|
||||
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd);
|
||||
assertFalse(outcome.isOk());
|
||||
assertEquals("Unknown code 'http://invalid-cs#28571000087109' for in-memory expansion of ValueSet 'http://vs-with-invalid-cs'", outcome.getMessage());
|
||||
assertThat(outcome.getMessage()).contains("Unknown code 'http://invalid-cs#28571000087109' for in-memory expansion of ValueSet 'http://vs-with-invalid-cs'");
|
||||
assertEquals("error", outcome.getSeverityCode());
|
||||
|
||||
// Try validating a code that is in the missing CS that is imported by the VS
|
||||
|
@ -1390,7 +1391,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(0, termValueSet.getConcepts().size());
|
||||
|
@ -1408,7 +1409,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size());
|
||||
|
@ -1451,7 +1452,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(0, termValueSet.getConcepts().size());
|
||||
|
@ -1469,7 +1470,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size());
|
||||
|
@ -1509,7 +1510,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(0, termValueSet.getConcepts().size());
|
||||
|
@ -1527,7 +1528,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size());
|
||||
|
@ -1569,7 +1570,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(0, termValueSet.getConcepts().size());
|
||||
|
@ -1587,7 +1588,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
TermValueSet termValueSet = optionalValueSetByUrl.get();
|
||||
assertSame(optionalValueSetByResourcePid.get(), termValueSet);
|
||||
ourLog.info("ValueSet:\n" + termValueSet.toString());
|
||||
ourLog.info("ValueSet:\n" + termValueSet);
|
||||
assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl());
|
||||
assertEquals("Terminology Services Connectation #1 Extensional case #2", termValueSet.getName());
|
||||
assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size());
|
||||
|
@ -1641,6 +1642,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
ValueSet expansion;
|
||||
IdType vsId = new IdType("ValueSet/vaccinecode");
|
||||
|
||||
TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true);
|
||||
|
||||
// Expand VS
|
||||
expansion = myValueSetDao.expand(vsId, new ValueSetExpansionOptions(), mySrd);
|
||||
assertThat(myValueSetTestUtil.extractExpansionMessage(expansion)).contains("Current status: NOT_EXPANDED");
|
||||
|
@ -1651,10 +1654,11 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
code = "28571000087109";
|
||||
String display = null;
|
||||
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, vsId, new CodeType(code), new UriType(codeSystemUrl), new StringType(display), null, null, mySrd);
|
||||
assertTrue(outcome.isOk());
|
||||
assertTrue(outcome.isOk(), outcome.getMessage() + "\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.toParameters(myFhirContext)) + "\n" + outcome.getSourceDetails());
|
||||
assertEquals("28571000087109", outcome.getCode());
|
||||
assertEquals("MODERNA COVID-19 mRNA-1273", outcome.getDisplay());
|
||||
assertEquals("0.17", outcome.getCodeSystemVersion());
|
||||
assertThat(outcome.getSourceDetails()).contains("Code was validated against in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode");
|
||||
|
||||
// Validate code - good code, bad display
|
||||
codeSystemUrl = "http://snomed.info/sct";
|
||||
|
@ -1664,7 +1668,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
assertTrue(outcome.isOk());
|
||||
assertEquals("28571000087109", outcome.getCode());
|
||||
assertEquals("MODERNA COVID-19 mRNA-1273", outcome.getDisplay());
|
||||
assertEquals("Concept Display \"BLAH\" does not match expected \"MODERNA COVID-19 mRNA-1273\" for 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode", outcome.getMessage());
|
||||
assertThat(outcome.getMessage()).contains("Concept Display \"BLAH\" does not match expected \"MODERNA COVID-19 mRNA-1273\" for 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'");
|
||||
assertEquals("Code was validated against in-memory expansion of ValueSet: http://ehealthontario.ca/fhir/ValueSet/vaccinecode", outcome.getSourceDetails());
|
||||
assertEquals("0.17", outcome.getCodeSystemVersion());
|
||||
|
||||
|
@@ -1734,6 +1738,50 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
}


@Test
public void testExpandValueSet_VsHasNoCodes() {
CodeSystem cs = new CodeSystem();
cs.setId("snomed-ct-ca-imm");
cs.setStatus(Enumerations.PublicationStatus.ACTIVE);
cs.setContent(CodeSystem.CodeSystemContentMode.FRAGMENT);
cs.setUrl("http://snomed.info/sct");
cs.setVersion("http://snomed.info/sct/20611000087101/version/20210331");
cs.addConcept().setCode("28571000087109").setDisplay("MODERNA COVID-19 mRNA-1273");
myCodeSystemDao.update(cs);

// No codes in this valueset
ValueSet vs = new ValueSet();
vs.setId("vaccinecode");
vs.setUrl("http://ehealthontario.ca/fhir/ValueSet/vaccinecode");
vs.setVersion("0.1.17");
vs.setStatus(Enumerations.PublicationStatus.ACTIVE);
myValueSetDao.update(vs);

ConceptValidationOptions options = new ConceptValidationOptions();
options.setValidateDisplay(true);

String codeSystemUrl;
String code;
ValueSet expansion;
IdType vsId = new IdType("ValueSet/vaccinecode");

TermReadSvcImpl.setForceDisableHibernateSearchForUnitTest(true);

// Expand VS
expansion = myValueSetDao.expand(vsId, new ValueSetExpansionOptions(), mySrd);
assertThat(myValueSetTestUtil.extractExpansionMessage(expansion)).contains("Current status: NOT_EXPANDED");
assertThat(myValueSetTestUtil.toCodes(expansion)).isEmpty();

// Validate code
codeSystemUrl = "http://snomed.info/sct";
code = "38765352";
String display = null;
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(null, vsId, new CodeType(code), new UriType(codeSystemUrl), new StringType(display), null, null, mySrd);
assertFalse(outcome.isOk());
assertThat(outcome.getMessage()).contains("in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'");
assertThat(outcome.getMessage()).contains("Empty compose list for include");
}

@Test
public void testExpandValueSet_VsIsEnumeratedWithVersionedSystem_CsIsFragmentWithWrongVersion() {
CodeSystem cs = new CodeSystem();
@ -1776,7 +1824,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
code = "28571000087109";
|
||||
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd);
|
||||
assertFalse(outcome.isOk());
|
||||
assertEquals("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'", outcome.getMessage());
|
||||
assertThat(outcome.getMessage()).contains("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'");
|
||||
assertEquals("error", outcome.getSeverityCode());
|
||||
|
||||
// Perform Pre-Expansion
|
||||
|
@ -1835,7 +1883,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
code = "28571000087109";
|
||||
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(new CodeType(valueSetUrl), null, new CodeType(code), new CodeType(codeSystemUrl), null, null, null, mySrd);
|
||||
assertFalse(outcome.isOk());
|
||||
assertEquals("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'", outcome.getMessage());
|
||||
assertThat(outcome.getMessage()).contains("Unknown code 'http://snomed.info/sct#28571000087109' for in-memory expansion of ValueSet 'http://ehealthontario.ca/fhir/ValueSet/vaccinecode'");
|
||||
assertEquals("error", outcome.getSeverityCode());
|
||||
|
||||
// Perform Pre-Expansion
|
||||
|
@ -2087,12 +2135,12 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test implements IValueSet
|
|||
|
||||
// Validate code that is good
|
||||
IValidationSupport.CodeValidationResult outcome = myValueSetDao.validateCode(vs.getUrlElement(), null, new StringType("B"), cs.getUrlElement(), null, null, null, mySrd);
|
||||
assertEquals(true, outcome.isOk());
|
||||
assertTrue(outcome.isOk());
|
||||
assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated");
|
||||
|
||||
// Validate code that is bad
|
||||
outcome = myValueSetDao.validateCode(vs.getUrlElement(), null, new StringType("A"), cs.getUrlElement(), null, null, null, mySrd);
|
||||
assertEquals(false, outcome.isOk());
|
||||
assertFalse(outcome.isOk());
|
||||
assertThat(outcome.getMessage()).contains("Code validation occurred using a ValueSet expansion that was pre-calculated");
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.8-SNAPSHOT</version>
|
||||
<version>7.7.9-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>7.7.8-SNAPSHOT</version>
|
||||
<version>7.7.9-SNAPSHOT</version>
|
||||
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
|
|
@@ -56,9 +56,7 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
@@ -434,10 +432,6 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil

@AfterEach
public void afterClearTerminologyCaches() {
TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
baseHapiTerminologySvc.clearCaches();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
TermDeferredStorageSvcImpl deferredStorageSvc = AopTestUtils.getTargetObject(myTermDeferredStorageSvc);
deferredStorageSvc.clearDeferred();
}
@@ -446,7 +440,6 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
@Override
protected void afterResetInterceptors() {
super.afterResetInterceptors();
// myInterceptorRegistry.unregisterInterceptor(myPerformanceTracingLoggingInterceptor);
}

@BeforeEach
@@ -113,7 +113,7 @@ public class FhirResourceDaoR5ValueSetTest extends BaseJpaR5Test {
IValidationSupport.CodeValidationResult result = myValueSetDao.validateCode(valueSetIdentifier, id, code, system, display, coding, codeableConcept, mySrd);
assertTrue(result.isOk());
assertEquals("Systolic blood pressure at First encounter", result.getDisplay());
assertEquals("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet: http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", result.getMessage());
assertThat(result.getMessage()).contains("Concept Display \"Systolic blood pressure at First encounterXXXX\" does not match expected \"Systolic blood pressure at First encounter\" for 'http://acme.org#11378-7' for in-memory expansion of ValueSet 'http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2'");
}

@Test
@@ -1253,7 +1253,8 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test {
assertEquals(false, ((BooleanType) respParam.getParameter().get(0).getValue()).getValue());

assertEquals("message", respParam.getParameter().get(1).getName());
assertEquals("Unknown code 'http://hl7.org/fhir/administrative-gender#male' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/marital-status'", ((StringType) respParam.getParameter().get(1).getValue()).getValue());
String message = ((StringType) respParam.getParameter().get(1).getValue()).getValue();
assertThat(message).contains("Unknown code 'http://hl7.org/fhir/administrative-gender#male' for in-memory expansion of ValueSet 'http://hl7.org/fhir/ValueSet/marital-status'", message);
}
}
@@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -223,7 +223,7 @@ public abstract class CompositeSearchParameterTestCases implements ITestDataBuil
searchParameter.addComponent(
componentFrom("http://hl7.org/fhir/SearchParameter/RiskAssessment-probability", "RiskAssessment"));
searchParameter.setExtension(List.of(theExtension));
doCreateResource(searchParameter);
ourLog.info("Created SP: {}", doCreateResource(searchParameter).toUnqualifiedVersionless());

// enable this sp.
myTestDaoSearch.getSearchParamRegistry().forceRefresh();
@@ -51,9 +51,7 @@ import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
@@ -365,10 +363,6 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {

@AfterEach
public void afterClearTerminologyCaches() {
TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
baseHapiTerminologySvc.clearCaches();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
deferredSvc.clearDeferred();
}
@@ -598,10 +598,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil

@AfterEach
public void afterClearTerminologyCaches() {
TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
baseHapiTerminologySvc.clearCaches();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
TermDeferredStorageSvcImpl termDeferredStorageSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
termDeferredStorageSvc.clearDeferred();
@@ -394,9 +394,6 @@ public abstract class BaseJpaTest extends BaseTest {
if (myMemoryCacheService != null) {
myMemoryCacheService.invalidateAllCaches();
}
if (myJpaPersistedValidationSupport != null) {
ProxyUtil.getSingletonTarget(myJpaPersistedValidationSupport, JpaPersistedResourceValidationSupport.class).clearCaches();
}
if (myFhirInstanceValidator != null) {
myFhirInstanceValidator.invalidateCaches();
}
@@ -37,9 +37,7 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.term.IValueSetConceptAccumulator;
import ca.uhn.fhir.jpa.term.TermConceptMappingSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
import ca.uhn.fhir.jpa.term.TermReadSvcImpl;
import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
@@ -179,10 +177,6 @@ public abstract class BaseValueSetHSearchExpansionR4Test extends BaseJpaTest {

@AfterEach
public void afterClearTerminologyCaches() {
TermReadSvcImpl baseHapiTerminologySvc = AopTestUtils.getTargetObject(myTermSvc);
baseHapiTerminologySvc.clearCaches();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationCache();
TermConceptMappingSvcImpl.clearOurLastResultsFromTranslationWithReverseCache();
TermDeferredStorageSvcImpl deferredSvc = AopTestUtils.getTargetObject(myTerminologyDeferredStorageSvc);
deferredSvc.clearDeferred();
}
@@ -3,9 +3,12 @@ package ca.uhn.fhir.jpa.cache;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.model.primitive.IdDt;
@@ -15,6 +18,7 @@ import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -28,6 +32,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -91,11 +96,12 @@ public class ResourceVersionSvcTest {
* @param theResourcePacks
*/
private void mockReturnsFor_getIdsOfExistingResources(ResourceIdPackage... theResourcePacks) {
List<IResourcePersistentId> resourcePersistentIds = new ArrayList<>();
List<IResourcePersistentId<Long>> resourcePersistentIds = new ArrayList<>();
List<Object[]> matches = new ArrayList<>();

for (ResourceIdPackage pack : theResourcePacks) {
resourcePersistentIds.add(pack.myPid);
pack.myPid.setAssociatedResourceId(pack.MyResourceId);

matches.add(getResourceTableRecordForResourceTypeAndPid(
pack.MyResourceId.getResourceType(),
@@ -104,11 +110,19 @@ public class ResourceVersionSvcTest {
));
}

IResourcePersistentId first = resourcePersistentIds.remove(0);
IResourcePersistentId<Long> first = resourcePersistentIds.remove(0);
if (resourcePersistentIds.isEmpty()) {
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(Collections.singletonList(first));
when(myIdHelperService.resolveResourceIdentities(any(), any(), any()))
.thenReturn(Map.of(first.getAssociatedResourceId(), new JpaResourceLookup(first.getResourceType(), first.getAssociatedResourceId().getIdPart(), (JpaPid) first, null, null)));
} else {
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds);

HashMap<IIdType, IResourceLookup<JpaPid>> map = new HashMap<>();
for (var next : resourcePersistentIds) {
map.put(next.getAssociatedResourceId(), new JpaResourceLookup(next.getResourceType(), next.getAssociatedResourceId().getIdPart(), (JpaPid) next, null, null));
}

when(myIdHelperService.resolveResourceIdentities(any(), any(), any()))
.thenReturn(map);
}
}
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -32,6 +32,7 @@ import ca.uhn.fhir.mdm.svc.MdmSearchExpansionSvc;
import ca.uhn.fhir.rest.api.server.IPreResourceShowDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import org.hl7.fhir.instance.model.api.IAnyResource;
@@ -90,7 +91,10 @@ public class MdmReadVirtualizationInterceptor<P extends IResourcePersistentId<?>
@Hook(
value = Pointcut.STORAGE_PRESEARCH_REGISTERED,
order = MdmConstants.ORDER_PRESEARCH_REGISTERED_MDM_READ_VIRTUALIZATION_INTERCEPTOR)
public void preSearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) {
public void preSearchRegistered(
RequestDetails theRequestDetails,
SearchParameterMap theSearchParameterMap,
ICachedSearchDetails theSearchDetails) {
ourMdmTroubleshootingLog
.atTrace()
.setMessage("MDM virtualization original search: {}{}")
@@ -98,14 +102,16 @@ public class MdmReadVirtualizationInterceptor<P extends IResourcePersistentId<?>
.addArgument(() -> theSearchParameterMap.toNormalizedQueryString(myFhirContext))
.log();

String resourceType = theSearchDetails.getResourceType();

if (theSearchParameterMap.hasIncludes() || theSearchParameterMap.hasRevIncludes()) {
myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails(
theRequestDetails, theSearchParameterMap, PARAM_TESTER_ALL);
resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER_ALL);
} else {
// If we don't have any includes, it's not worth auto-expanding the _id parameter since we'll only end
// up filtering out the extra resources afterward
myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails(
theRequestDetails, theSearchParameterMap, PARAM_TESTER_NO_RES_ID);
resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER_NO_RES_ID);
}

ourMdmTroubleshootingLog
@@ -116,7 +122,6 @@ public class MdmReadVirtualizationInterceptor<P extends IResourcePersistentId<?>
.log();
}

@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
@Hook(Pointcut.STORAGE_PRESHOW_RESOURCES)
public void preShowResources(RequestDetails theRequestDetails, IPreResourceShowDetails theDetails) {
MdmSearchExpansionResults expansionResults = MdmSearchExpansionSvc.getCachedExpansionResults(theRequestDetails);
@@ -29,6 +29,7 @@ import ca.uhn.fhir.mdm.svc.MdmSearchExpansionSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -57,11 +58,15 @@ public class MdmSearchExpandingInterceptor {
@Hook(
value = Pointcut.STORAGE_PRESEARCH_REGISTERED,
order = MdmConstants.ORDER_PRESEARCH_REGISTERED_MDM_SEARCH_EXPANDING_INTERCEPTOR)
public void hook(RequestDetails theRequestDetails, SearchParameterMap theSearchParameterMap) {
public void hook(
RequestDetails theRequestDetails,
SearchParameterMap theSearchParameterMap,
ICachedSearchDetails theSearchDetails) {

if (myStorageSettings.isAllowMdmExpansion()) {
String resourceType = theSearchDetails.getResourceType();
myMdmSearchExpansionSvc.expandSearchAndStoreInRequestDetails(
theRequestDetails, theSearchParameterMap, PARAM_TESTER);
resourceType, theRequestDetails, theSearchParameterMap, PARAM_TESTER);
}
}
}
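For orientation: both MDM interceptors above now bind Pointcut.STORAGE_PRESEARCH_REGISTERED with a third ICachedSearchDetails parameter and pass the resource type into MdmSearchExpansionSvc. A minimal sketch of a custom interceptor written against this hook signature follows; the class name, logger, and log message are illustrative only and not part of this commit.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical example class, not part of this commit
@Interceptor
public class ExamplePreSearchLoggingInterceptor {
	private static final Logger ourLog = LoggerFactory.getLogger(ExamplePreSearchLoggingInterceptor.class);

	@Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
	public void preSearchRegistered(
			RequestDetails theRequestDetails,
			SearchParameterMap theSearchParameterMap,
			ICachedSearchDetails theSearchDetails) {
		// The resource type of the registered search now comes from the cached search details
		String resourceType = theSearchDetails.getResourceType();
		ourLog.info("Search registered for {}: {}", resourceType,
				theSearchParameterMap.toNormalizedQueryString(theRequestDetails.getFhirContext()));
	}
}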
@@ -43,6 +43,8 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;

import static org.apache.commons.lang3.StringUtils.isBlank;

public class MdmSearchExpansionSvc {
private static final String EXPANSION_RESULTS = MdmSearchExpansionSvc.class.getName() + "_EXPANSION_RESULTS";
private static final String RESOURCE_NAME = MdmSearchExpansionSvc.class.getName() + "_RESOURCE_NAME";
@@ -75,6 +77,7 @@ public class MdmSearchExpansionSvc {
* @since 8.0.0
*/
public MdmSearchExpansionResults expandSearchAndStoreInRequestDetails(
String theResourceName,
@Nullable RequestDetails theRequestDetails,
@Nonnull SearchParameterMap theSearchParameterMap,
IParamTester theParamTester) {
@@ -113,12 +116,7 @@ public class MdmSearchExpansionSvc {
// here we will know if it's an _id param or not
// from theSearchParameterMap.keySet()
expandAnyReferenceParameters(
requestPartitionId,
theRequestDetails.getResourceName(),
paramName,
orList,
theParamTester,
expansionResults);
requestPartitionId, theResourceName, paramName, orList, theParamTester, expansionResults);
}
}
@@ -128,9 +126,8 @@ public class MdmSearchExpansionSvc {
* Note: Do this at the end so that the query string reflects the post-translated
* query string
*/
String resourceName = theRequestDetails.getResourceName();
String queryString = theSearchParameterMap.toNormalizedQueryString(myFhirContext);
theRequestDetails.getUserData().put(RESOURCE_NAME, resourceName);
theRequestDetails.getUserData().put(RESOURCE_NAME, theResourceName);
theRequestDetails.getUserData().put(QUERY_STRING, queryString);

return expansionResults;
@@ -195,7 +192,7 @@ public class MdmSearchExpansionSvc {
}

private String addResourceTypeIfNecessary(String theResourceType, String theResourceId) {
if (theResourceId.contains("/")) {
if (theResourceId.contains("/") || isBlank(theResourceType)) {
return theResourceId;
} else {
return theResourceType + "/" + theResourceId;
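The new isBlank(theResourceType) guard above means an id is returned unqualified when no resource type is available. A standalone sketch of the same behaviour, for illustration only (this helper is a demonstration copy, not the commit's code):

import static org.apache.commons.lang3.StringUtils.isBlank;

// Illustrative standalone copy of the guarded helper shown in the hunk above
static String addResourceTypeIfNecessary(String theResourceType, String theResourceId) {
	if (theResourceId.contains("/") || isBlank(theResourceType)) {
		// Already type-qualified (e.g. "Patient/123"), or no type known to qualify with
		return theResourceId;
	} else {
		return theResourceType + "/" + theResourceId;
	}
}

// addResourceTypeIfNecessary("Patient", "123")         -> "Patient/123"
// addResourceTypeIfNecessary("Patient", "Patient/123") -> "Patient/123"
// addResourceTypeIfNecessary("", "123")                -> "123" (the case added by this change)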
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@@ -21,6 +21,8 @@ package ca.uhn.fhir.rest.server.util;

public interface ICachedSearchDetails {

String getResourceType();

String getUuid();

void setUuid(String theUuid);
@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@@ -7,7 +7,7 @@
<parent>
<artifactId>hapi-fhir-serviceloaders</artifactId>
<groupId>ca.uhn.hapi.fhir</groupId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

<relativePath>../pom.xml</relativePath>
</parent>
@@ -21,7 +21,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-caching-api</artifactId>
<version>7.7.8-SNAPSHOT</version>
<version>7.7.9-SNAPSHOT</version>

</dependency>
<dependency>
Some files were not shown because too many files have changed in this diff.