Compare commits

...

3 Commits

Author SHA1 Message Date
dotasek e54c98e8bf
Merge 08c441ca40 into d4f1766ca5 2024-11-23 06:01:00 -05:00
James Agnew d4f1766ca5
Optimize transaction writes (#6460)
* Optimize transaction writes

* Cleanup

* Work on tests

* Work on fixes

* Fixes

* Fixes

* Test fixes

* Cleanup

* Cleanup

* Work on tests

* Work on tests

* Test fixes

* Test fixes

* Remove redundant test

* Test fixes

* Test fix

* Test fixes

* Test fixes

* Test fixes

* Add changelog

* Checkstyle fixes

* Test fixes

* Spotless

* Test cleanup

* Spotless

* Test fix

* Work on review comments

* Address review comments

* Test fixes

* Ongoing test cleanup work

* Work on tests

* Query count tests passing

* Test fixes

* Test fixes

* Remove fixme

* Test cleanup

* Test fix

* Remove default method

* Service cleanup

* Test fix

* version bump

* Test cleanup
2024-11-21 19:22:42 -05:00
dotasek 08c441ca40 Update swagger-ui link to https 2022-11-08 09:30:41 -05:00
153 changed files with 1890 additions and 1262 deletions

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -40,13 +40,22 @@ import java.util.stream.Stream;
*/
public class TaskChunker<T> {
public void chunk(Collection<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
public static <T> void chunk(List<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
if (theInput.size() <= theChunkSize) {
theBatchConsumer.accept(theInput);
return;
}
chunk((Collection<T>) theInput, theChunkSize, theBatchConsumer);
}
public static <T> void chunk(Collection<T> theInput, int theChunkSize, Consumer<List<T>> theBatchConsumer) {
List<T> input;
if (theInput instanceof List) {
input = (List<T>) theInput;
} else {
input = new ArrayList<>(theInput);
}
for (int i = 0; i < input.size(); i += theChunkSize) {
int to = i + theChunkSize;
to = Math.min(to, input.size());
@ -56,12 +65,11 @@ public class TaskChunker<T> {
}
@Nonnull
public <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
public static <T> Stream<List<T>> chunk(Stream<T> theStream, int theChunkSize) {
return StreamUtil.partition(theStream, theChunkSize);
}
@Nonnull
public void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
public static <T> void chunk(Iterator<T> theIterator, int theChunkSize, Consumer<List<T>> theListConsumer) {
chunk(Streams.stream(theIterator), theChunkSize).forEach(theListConsumer);
}
}

View File

@ -87,6 +87,7 @@ ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFail
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.
ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.cantUndeleteWithDeletesDisabled=Unable to restore previously deleted resource as deletes are disabled on this server.
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.invalidMatchUrlInvalidResourceType=Invalid match URL "{0}" - Unknown resource type: "{1}"
ca.uhn.fhir.jpa.dao.BaseStorageDao.invalidMatchUrlNoMatches=Invalid match URL "{0}" - No resources match this search
@ -200,6 +201,7 @@ ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder.invalidTar
ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl.invalidResourceType=Invalid/unsupported resource type: "{0}"
ca.uhn.fhir.jpa.dao.index.IdHelperService.nonUniqueForcedId=Non-unique ID specified, can not process request
ca.uhn.fhir.jpa.dao.index.IdHelperService.deletedId=Resource {0} has been deleted
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.noIdSupplied=No Partition ID supplied
ca.uhn.fhir.jpa.partition.PartitionLookupSvcImpl.missingPartitionIdOrName=Partition must have an ID and a Name

View File

@ -37,7 +37,7 @@ public class TaskChunkerTest {
List<Integer> input = newIntRangeList(0, 35);
// Execute
new TaskChunker<Integer>().chunk(input, 10, myConsumer);
TaskChunker.chunk(input, 10, myConsumer);
// Verify
verify(myConsumer, times(4)).accept(myConsumerCaptor.capture());

View File

@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
@ -12,7 +12,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,5 @@
---
type: change
issue: 6460
title: "The HFJ_RES_LINK table will no longer store the `PARTITION_DATE` value for the indexed link target
resource, as this was an unused feature which has been removed as a part of a larger performance optimization."

View File

@ -0,0 +1,9 @@
---
type: perf
issue: 6460
title: "The JPA server FHIR transaction processor will now pre-fetch the target
resource state for references to resources that don't also appear in the
transaction bundle. This means that if you process a large FHIR transaction containing
many references to other resources in the repository that are not also being
updated in the same transaction, you should see a very significant improvement
in performance."

View File

@ -0,0 +1,7 @@
---
type: perf
issue: 6460
title: "The JPA server FHIR transaction processor will now more aggressively cache
resource IDs for previously seen resources, reducing the number of database reads
required when processing transactions. This should provide a noticeable improvement
in performance when processing transactions which update pre-existing resources."

View File

@ -0,0 +1,5 @@
---
type: change
issue: 6460
title: "If deletes are disabled in the JPA server, it is no longer possible to un-delete
a resource (i.e. update a previously deleted resource to make it non-deleted)."

View File

@ -0,0 +1,9 @@
---
type: change
issue: 6460
title: "When performing a FHIR Transaction which deletes and then updates (or otherwise
un-deletes) the same resource within a single transaction, the delete was previously
not stored as a distinct version (meaning that the resource version was only
incremented once, and no delete was actually stored in the resource history). This
has been changed so that deletes will always appear as a distinct entry in the
resource history."

View File

@ -36,4 +36,4 @@ You then simply have to register the interceptor against your RestfulServer inst
# Demonstration
See the HAPI FHIR Test Server for a demonstration of HAPI FHIR OpenAPI functionality: http://hapi.fhir.org/baseR4/swagger-ui/
See the HAPI FHIR Test Server for a demonstration of HAPI FHIR OpenAPI functionality: https://hapi.fhir.org/baseR4/swagger-ui/

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,7 +20,11 @@
package ca.uhn.fhir.jpa.config;
import com.google.common.base.Strings;
import org.hibernate.cfg.AvailableSettings;
import jakarta.annotation.Nonnull;
import org.hibernate.cfg.BatchSettings;
import org.hibernate.cfg.JdbcSettings;
import org.hibernate.cfg.ManagedBeanSettings;
import org.hibernate.cfg.QuerySettings;
import org.hibernate.query.criteria.ValueHandlingMode;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
@ -46,18 +50,19 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
}
@Nonnull
@Override
public Map<String, Object> getJpaPropertyMap() {
Map<String, Object> retVal = super.getJpaPropertyMap();
// SOMEDAY these defaults can be set in the constructor. setJpaProperties does a merge.
if (!retVal.containsKey(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE)) {
retVal.put(AvailableSettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND);
if (!retVal.containsKey(QuerySettings.CRITERIA_VALUE_HANDLING_MODE)) {
retVal.put(QuerySettings.CRITERIA_VALUE_HANDLING_MODE, ValueHandlingMode.BIND);
}
if (!retVal.containsKey(AvailableSettings.CONNECTION_HANDLING)) {
if (!retVal.containsKey(JdbcSettings.CONNECTION_HANDLING)) {
retVal.put(
AvailableSettings.CONNECTION_HANDLING,
JdbcSettings.CONNECTION_HANDLING,
PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_TRANSACTION);
}
@ -65,26 +70,25 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
* Set some performance options
*/
if (!retVal.containsKey(AvailableSettings.STATEMENT_BATCH_SIZE)) {
retVal.put(AvailableSettings.STATEMENT_BATCH_SIZE, "30");
if (!retVal.containsKey(BatchSettings.STATEMENT_BATCH_SIZE)) {
retVal.put(BatchSettings.STATEMENT_BATCH_SIZE, "30");
}
if (!retVal.containsKey(AvailableSettings.ORDER_INSERTS)) {
retVal.put(AvailableSettings.ORDER_INSERTS, "true");
if (!retVal.containsKey(BatchSettings.ORDER_INSERTS)) {
retVal.put(BatchSettings.ORDER_INSERTS, "true");
}
if (!retVal.containsKey(AvailableSettings.ORDER_UPDATES)) {
retVal.put(AvailableSettings.ORDER_UPDATES, "true");
if (!retVal.containsKey(BatchSettings.ORDER_UPDATES)) {
retVal.put(BatchSettings.ORDER_UPDATES, "true");
}
if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
if (!retVal.containsKey(BatchSettings.BATCH_VERSIONED_DATA)) {
retVal.put(BatchSettings.BATCH_VERSIONED_DATA, "true");
}
// Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate
// needs
// in order to be able to resolve beans, so we add it back in manually here
if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
// needs in order to be able to resolve beans, so we add it back in manually here
if (!retVal.containsKey(ManagedBeanSettings.BEAN_CONTAINER)) {
retVal.put(ManagedBeanSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
}
return retVal;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -42,7 +42,6 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
import ca.uhn.fhir.mdm.dao.IMdmLinkDao;
import ca.uhn.fhir.mdm.model.MdmPidTuple;
@ -59,6 +58,7 @@ import ca.uhn.fhir.util.ExtensionUtil;
import ca.uhn.fhir.util.HapiExtensions;
import ca.uhn.fhir.util.Logs;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.TaskChunker;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
import org.apache.commons.lang3.StringUtils;
@ -315,8 +315,7 @@ public class JpaBulkExportProcessor implements IBulkExportProcessor<JpaPid> {
// for each patient pid ->
// search for the target resources, with their correct patient references, chunked.
// The results will be jammed into myReadPids
QueryChunker<JpaPid> queryChunker = new QueryChunker<>();
queryChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> {
TaskChunker.chunk(expandedMemberResourceIds, QUERY_CHUNK_SIZE, (idChunk) -> {
try {
queryResourceTypeWithReferencesToPatients(pids, idChunk, theParams, theDef);
} catch (IOException ex) {

View File

@ -1323,7 +1323,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
* the previous version entity.
*/
if (historyEntry == null) {
historyEntry = theEntity.toHistory(versionedTags);
historyEntry = theEntity.toHistory(versionedTags && theEntity.getDeleted() == null);
}
historyEntry.setEncoding(theChanged.getEncoding());
@ -1331,7 +1331,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
historyEntry.setResourceTextVc(theChanged.getResourceText());
ourLog.debug("Saving history entry ID[{}] for RES_ID[{}]", historyEntry.getId(), historyEntry.getResourceId());
myResourceHistoryTableDao.save(historyEntry);
myEntityManager.persist(historyEntry);
theEntity.setCurrentVersionEntity(historyEntry);
// Save resource source
@ -1489,6 +1489,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
wasDeleted = theOldResource.isDeleted();
}
if (wasDeleted && !myStorageSettings.isDeleteEnabled()) {
String msg = myContext.getLocalizer().getMessage(BaseHapiFhirDao.class, "cantUndeleteWithDeletesDisabled");
throw new InvalidRequestException(Msg.code(2573) + msg);
}
DaoMethodOutcome outcome = toMethodOutcome(
theRequestDetails, savedEntity, theResource, theMatchUrl, theOperationType)
.setCreated(wasDeleted);

View File

@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@ -567,8 +568,11 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Pre-cache the resource ID
jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext));
myIdHelperService.addResolvedPidToForcedId(
jpaPid, theRequestPartitionId, getResourceName(), entity.getFhirId(), null);
String fhirId = entity.getFhirId();
if (fhirId == null) {
fhirId = Long.toString(entity.getId());
}
myIdHelperService.addResolvedPidToFhirId(jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);
@ -1736,8 +1740,13 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
validateResourceTypeAndThrowInvalidRequestException(theId);
BaseHasResource entity;
JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
requestPartitionId, getResourceName(), theId.getIdPart());
JpaPid pid = myIdHelperService
.resolveResourceIdentity(
requestPartitionId,
getResourceName(),
theId.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk())
.getPersistentId();
Set<Integer> readPartitions = null;
if (requestPartitionId.isAllPartitions()) {
entity = myEntityManager.find(ResourceTable.class, pid.getId());
@ -1779,10 +1788,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
if (entity == null) {
throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known");
}
if (theId.hasVersionIdPart()) {
if (!theId.isVersionIdPartValidLong()) {
throw new ResourceNotFoundException(Msg.code(978)
@ -1822,7 +1827,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
}
Validate.notNull(entity);
if (entity == null) {
throw new ResourceNotFoundException(Msg.code(1996) + "Resource " + theId + " is not known");
}
validateResourceType(entity);
if (theCheckForForcedId) {
@ -1871,8 +1879,27 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
}
if (persistentId == null) {
persistentId = myIdHelperService.resolveResourcePersistentIds(
theRequestPartitionId, getResourceName(), theId.getIdPart());
String resourceName = getResourceName();
if (myStorageSettings.getResourceClientIdStrategy()
== JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC) {
if (theId.isIdPartValidLong()) {
/*
* If it's a pure numeric ID and we are in ALPHANUMERIC mode, then the number
* corresponds to a DB PID. In this case we want to resolve it regardless of
* which type the client has supplied. This is because DB PIDs are unique across
* all resource types (unlike FHIR_IDs which are namespaced to the resource type).
* We want to load the resource with that PID regardless of type because if
* the user is trying to update it we want to fail if the type is wrong, as
* opposed to trying to create a new instance.
*/
resourceName = null;
}
}
persistentId = myIdHelperService.resolveResourceIdentityPid(
theRequestPartitionId,
resourceName,
theId.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
}
ResourceTable entity = myEntityManager.find(ResourceTable.class, persistentId.getId());

View File

@ -192,7 +192,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
HapiTransactionService.requireTransaction();
List<Long> pids = theResolvedIds.stream().map(t -> ((JpaPid) t).getId()).collect(Collectors.toList());
new QueryChunker<Long>().chunk(pids, idChunk -> {
QueryChunker.chunk(pids, idChunk -> {
/*
* Pre-fetch the resources we're touching in this transaction in mass - this reduced the

View File

@ -81,7 +81,7 @@ public class HistoryBuilder {
private FhirContext myCtx;
@Autowired
private IIdHelperService myIdHelperService;
private IIdHelperService<JpaPid> myIdHelperService;
/**
* Constructor
@ -150,13 +150,13 @@ public class HistoryBuilder {
query.setMaxResults(theToIndex - theFromIndex);
List<ResourceHistoryTable> tables = query.getResultList();
if (tables.size() > 0) {
if (!tables.isEmpty()) {
ImmutableListMultimap<Long, ResourceHistoryTable> resourceIdToHistoryEntries =
Multimaps.index(tables, ResourceHistoryTable::getResourceId);
Set<JpaPid> pids = resourceIdToHistoryEntries.keySet().stream()
.map(JpaPid::fromId)
.collect(Collectors.toSet());
PersistentIdToForcedIdMap pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids);
PersistentIdToForcedIdMap<JpaPid> pidToForcedId = myIdHelperService.translatePidsToForcedIds(pids);
ourLog.trace("Translated IDs: {}", pidToForcedId.getResourcePersistentIdOptionalMap());
for (Long nextResourceId : resourceIdToHistoryEntries.keySet()) {

View File

@ -242,13 +242,15 @@ public class JpaResourceDaoCodeSystem<T extends IBaseResource> extends BaseHapiF
theTransactionDetails,
theForceUpdate,
theCreateNewHistoryEntry);
if (!retVal.isUnchangedInCurrentOperation()) {
if (thePerformIndexing) {
if (!retVal.isUnchangedInCurrentOperation()) {
org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource);
addPidToResource(theEntity, cs);
org.hl7.fhir.r4.model.CodeSystem cs = myVersionCanonicalizer.codeSystemToCanonical(theResource);
addPidToResource(theEntity, cs);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(
cs, (ResourceTable) theEntity, theRequest);
myTerminologyCodeSystemStorageSvc.storeNewCodeSystemVersionIfNeeded(
cs, (ResourceTable) theEntity, theRequest);
}
}
return retVal;

View File

@ -22,6 +22,8 @@ package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@ -39,12 +41,15 @@ import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.servlet.http.HttpServletResponse;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapiFhirResourceDao<T>
@ -138,13 +143,14 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
patientParams.addAll(theSearchParameterMap.get(getSubjectParamName()));
}
Map<IIdType, ReferenceParam> ids = new HashMap<>();
for (List<? extends IQueryParameterType> nextPatientList : patientParams) {
for (IQueryParameterType nextOr : nextPatientList) {
if (nextOr instanceof ReferenceParam) {
ReferenceParam ref = (ReferenceParam) nextOr;
JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
requestPartitionId, ref.getResourceType(), ref.getIdPart());
orderedSubjectReferenceMap.put(pid.getId(), nextOr);
IIdType id = myFhirContext.getVersion().newIdType();
id.setParts(null, ref.getResourceType(), ref.getIdPart(), null);
ids.put(id, ref);
} else {
throw new IllegalArgumentException(
Msg.code(942) + "Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
@ -152,6 +158,15 @@ public class JpaResourceDaoObservation<T extends IBaseResource> extends BaseHapi
}
}
Map<IIdType, IResourceLookup<JpaPid>> resolvedIds = myIdHelperService.resolveResourceIdentities(
requestPartitionId,
ids.keySet(),
ResolveIdentityMode.includeDeleted().cacheOk());
for (Map.Entry<IIdType, ReferenceParam> entry : ids.entrySet()) {
IResourceLookup<JpaPid> lookup = resolvedIds.get(entry.getKey());
orderedSubjectReferenceMap.put(lookup.getPersistentId().getId(), entry.getValue());
}
theSearchParameterMap.remove(getSubjectParamName());
theSearchParameterMap.remove(getPatientParamName());

View File

@ -303,12 +303,14 @@ public class JpaResourceDaoValueSet<T extends IBaseResource> extends BaseHapiFhi
theForceUpdate,
theCreateNewHistoryEntry);
if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
myTerminologySvc.storeTermValueSet(retVal, valueSet);
} else {
myTerminologySvc.deleteValueSetAndChildren(retVal);
if (thePerformIndexing) {
if (getStorageSettings().isPreExpandValueSets() && !retVal.isUnchangedInCurrentOperation()) {
if (retVal.getDeleted() == null) {
ValueSet valueSet = myVersionCanonicalizer.valueSetToCanonical(theResource);
myTerminologySvc.storeTermValueSet(retVal, valueSet);
} else {
myTerminologySvc.deleteValueSetAndChildren(retVal);
}
}
}

View File

@ -26,25 +26,29 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ResourceReferenceInfo;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TaskChunker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.FlushModeType;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.PersistenceException;
@ -67,6 +71,7 @@ import org.springframework.context.ApplicationContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
@ -83,6 +88,7 @@ public class TransactionProcessor extends BaseTransactionProcessor {
public static final Pattern SINGLE_PARAMETER_MATCH_URL_PATTERN = Pattern.compile("^[^?]+[?][a-z0-9-]+=[^&,]+$");
private static final Logger ourLog = LoggerFactory.getLogger(TransactionProcessor.class);
public static final int CONDITIONAL_URL_FETCH_CHUNK_SIZE = 100;
@Autowired
private ApplicationContext myApplicationContext;
@ -146,25 +152,51 @@ public class TransactionProcessor extends BaseTransactionProcessor {
List<IBase> theEntries,
StopWatch theTransactionStopWatch) {
ITransactionProcessorVersionAdapter<?, ?> versionAdapter = getVersionAdapter();
RequestPartitionId requestPartitionId =
super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries);
/*
* We temporarily set the flush mode for the duration of the DB transaction
* from the default of AUTO to the temporary value of COMMIT here. We do this
* because in AUTO mode, if any SQL SELECTs are required during the
* processing of an individual transaction entry, the server will flush the
* pending INSERTs/UPDATEs to the database before executing the SELECT.
* This hurts performance since we don't get the benefit of batching those
* write operations as much as possible. The tradeoff here is that we
* could theoretically have transaction operations which try to read
* data previously written in the same transaction, and they won't see it.
* This shouldn't actually be an issue anyhow - we pre-fetch conditional
* URLs and reference targets at the start of the transaction. But this
* tradeoff still feels worth it, since the most common use of transactions
* is for fast writing of data.
*
* Note that it's probably not necessary to reset it back, it should
* automatically go back to the default value after the transaction but
* we reset it just to be safe.
*/
FlushModeType initialFlushMode = myEntityManager.getFlushMode();
try {
myEntityManager.setFlushMode(FlushModeType.COMMIT);
if (requestPartitionId != null) {
preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);
ITransactionProcessorVersionAdapter<?, ?> versionAdapter = getVersionAdapter();
RequestPartitionId requestPartitionId =
super.determineRequestPartitionIdForWriteEntries(theRequest, theEntries);
if (requestPartitionId != null) {
preFetch(theTransactionDetails, theEntries, versionAdapter, requestPartitionId);
}
return super.doTransactionWriteOperations(
theRequest,
theActionName,
theTransactionDetails,
theAllIds,
theIdSubstitutions,
theIdToPersistedOutcome,
theResponse,
theOriginalRequestOrder,
theEntries,
theTransactionStopWatch);
} finally {
myEntityManager.setFlushMode(initialFlushMode);
}
return super.doTransactionWriteOperations(
theRequest,
theActionName,
theTransactionDetails,
theAllIds,
theIdSubstitutions,
theIdToPersistedOutcome,
theResponse,
theOriginalRequestOrder,
theEntries,
theTransactionStopWatch);
}
private void preFetch(
@ -199,40 +231,100 @@ public class TransactionProcessor extends BaseTransactionProcessor {
RequestPartitionId theRequestPartitionId,
Set<String> foundIds,
List<Long> idsToPreFetch) {
List<IIdType> idsToPreResolve = new ArrayList<>();
FhirTerser terser = myFhirContext.newTerser();
// Key: The ID of the resource
// Value: TRUE if we should prefetch the existing resource details and all stored indexes,
// FALSE if we should prefetch only the identity (resource ID and deleted status)
Map<IIdType, Boolean> idsToPreResolve = new HashMap<>(theEntries.size() * 3);
for (IBase nextEntry : theEntries) {
IBaseResource resource = theVersionAdapter.getResource(nextEntry);
if (resource != null) {
String verb = theVersionAdapter.getEntryRequestVerb(myFhirContext, nextEntry);
/*
* Pre-fetch any resources that are potentially being directly updated by ID
*/
if ("PUT".equals(verb) || "PATCH".equals(verb)) {
String requestUrl = theVersionAdapter.getEntryRequestUrl(nextEntry);
if (countMatches(requestUrl, '/') == 1 && countMatches(requestUrl, '?') == 0) {
if (countMatches(requestUrl, '?') == 0) {
IIdType id = myFhirContext.getVersion().newIdType();
id.setValue(requestUrl);
idsToPreResolve.add(id);
IIdType unqualifiedVersionless = id.toUnqualifiedVersionless();
idsToPreResolve.put(unqualifiedVersionless, Boolean.TRUE);
}
}
/*
* Pre-fetch any resources that are referred to directly by ID (don't replace
* the TRUE flag with FALSE in case we're updating a resource but also
* pointing to that resource elsewhere in the bundle)
*/
if ("PUT".equals(verb) || "POST".equals(verb)) {
for (ResourceReferenceInfo referenceInfo : terser.getAllResourceReferences(resource)) {
IIdType reference = referenceInfo.getResourceReference().getReferenceElement();
if (reference != null
&& !reference.isLocal()
&& !reference.isUuid()
&& reference.hasResourceType()
&& reference.hasIdPart()
&& !reference.getValue().contains("?")) {
idsToPreResolve.putIfAbsent(reference.toUnqualifiedVersionless(), Boolean.FALSE);
}
}
}
}
}
List<JpaPid> outcome =
myIdHelperService.resolveResourcePersistentIdsWithCache(theRequestPartitionId, idsToPreResolve).stream()
.collect(Collectors.toList());
for (JpaPid next : outcome) {
foundIds.add(
next.getAssociatedResourceId().toUnqualifiedVersionless().getValue());
theTransactionDetails.addResolvedResourceId(next.getAssociatedResourceId(), next);
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY
|| !next.getAssociatedResourceId().isIdPartValidLong()) {
idsToPreFetch.add(next.getId());
/*
* If all the entries in the pre-fetch ID map have a value of TRUE, this
* means we only have IDs associated with resources we're going to directly
* update/patch within the transaction. In that case, it's fine to include
* deleted resources, since updating them will bring them back to life.
*
* If we have any FALSE entries, we're also pre-fetching reference targets
* which means we don't want deleted resources, because those are not OK
* to reference.
*/
boolean preFetchIncludesReferences = idsToPreResolve.values().stream().anyMatch(t -> !t);
ResolveIdentityMode resolveMode = preFetchIncludesReferences
? ResolveIdentityMode.excludeDeleted().noCacheUnlessDeletesDisabled()
: ResolveIdentityMode.includeDeleted().cacheOk();
Map<IIdType, IResourceLookup<JpaPid>> outcomes = myIdHelperService.resolveResourceIdentities(
theRequestPartitionId, idsToPreResolve.keySet(), resolveMode);
for (Map.Entry<IIdType, IResourceLookup<JpaPid>> entry : outcomes.entrySet()) {
JpaPid next = (JpaPid) entry.getValue().getPersistentId();
IIdType unqualifiedVersionlessId = entry.getKey();
foundIds.add(unqualifiedVersionlessId.getValue());
theTransactionDetails.addResolvedResourceId(unqualifiedVersionlessId, next);
if (idsToPreResolve.get(unqualifiedVersionlessId) == Boolean.TRUE) {
if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY
|| (next.getAssociatedResourceId() != null
&& !next.getAssociatedResourceId().isIdPartValidLong())) {
idsToPreFetch.add(next.getId());
}
}
}
for (IIdType next : idsToPreResolve) {
if (!foundIds.contains(next.toUnqualifiedVersionless().getValue())) {
// Any IDs that could not be resolved are presumably not there, so
// cache that fact so we don't look again later
for (IIdType next : idsToPreResolve.keySet()) {
if (!foundIds.contains(next.getValue())) {
theTransactionDetails.addResolvedResourceId(next.toUnqualifiedVersionless(), null);
}
}
}
/**
 * Called by the base transaction processor when it moves from one HTTP verb to the
 * next while applying the entries of a transaction bundle. We force Hibernate to
 * flush pending writes at this point because this processor runs the transaction
 * with {@code FlushModeType.COMMIT} (see {@code doTransactionWriteOperations}),
 * so without an explicit flush, entries processed under a later verb would not
 * see rows written under an earlier one.
 */
@Override
protected void handleVerbChangeInTransactionWriteOperations() {
	super.handleVerbChangeInTransactionWriteOperations();
	myEntityManager.flush();
}
private void preFetchConditionalUrls(
TransactionDetails theTransactionDetails,
List<IBase> theEntries,
@ -274,12 +366,10 @@ public class TransactionProcessor extends BaseTransactionProcessor {
}
}
new QueryChunker<MatchUrlToResolve>()
.chunk(
searchParameterMapsToResolve,
100,
map -> preFetchSearchParameterMaps(
theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
TaskChunker.chunk(
searchParameterMapsToResolve,
CONDITIONAL_URL_FETCH_CHUNK_SIZE,
map -> preFetchSearchParameterMaps(theTransactionDetails, theRequestPartitionId, map, idsToPreFetch));
}
/**

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.PersistentIdToForcedIdMap;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
@ -35,11 +36,12 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.util.TaskChunker;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
@ -60,11 +62,12 @@ import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.IdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -79,6 +82,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import static ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder.replaceDefaultPartitionIdIfNonNull;
import static ca.uhn.fhir.model.primitive.IdDt.isValidLong;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
@ -102,6 +106,7 @@ import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class IdHelperService implements IIdHelperService<JpaPid> {
public static final Predicate[] EMPTY_PREDICATE_ARRAY = new Predicate[0];
public static final String RESOURCE_PID = "RESOURCE_PID";
private static final Logger ourLog = LoggerFactory.getLogger(IdHelperService.class);
@Autowired
protected IResourceTableDao myResourceTableDao;
@ -128,20 +133,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
myDontCheckActiveTransactionForUnitTest = theDontCheckActiveTransactionForUnitTest;
}
/**
 * Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to
 * convert those to the underlying Long values that are stored, for lookup and comparison purposes.
 * <p>
 * This overload does not exclude deleted resources: it simply delegates to the
 * four-argument overload with {@code theExcludeDeleted = false}.
 *
 * @param theRequestPartitionId the partition(s) to search
 * @param theResourceType       the resource type (e.g. {@code Patient})
 * @param theResourceId         the resource ID to resolve (may be qualified, e.g. {@code Patient/123})
 * @throws ResourceNotFoundException If the ID can not be found
 */
@Override
@Nonnull
public IResourceLookup<JpaPid> resolveResourceIdentity(
		@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theResourceId)
		throws ResourceNotFoundException {
	return resolveResourceIdentity(theRequestPartitionId, theResourceType, theResourceId, false);
}
/**
* Given a forced ID, convert it to its Long value. Since you are allowed to use string IDs for resources, we need to
* convert those to the underlying Long values that are stored, for lookup and comparison purposes.
@ -153,48 +144,236 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
@Nonnull
public IResourceLookup<JpaPid> resolveResourceIdentity(
@Nonnull RequestPartitionId theRequestPartitionId,
String theResourceType,
final String theResourceId,
boolean theExcludeDeleted)
@Nullable String theResourceType,
@Nonnull final String theResourceId,
@Nonnull ResolveIdentityMode theMode)
throws ResourceNotFoundException {
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive()
: "no transaction active";
String resourceIdToUse = theResourceId;
if (resourceIdToUse.contains("/")) {
resourceIdToUse = theResourceId.substring(resourceIdToUse.indexOf("/") + 1);
IIdType id;
if (theResourceType != null) {
id = newIdType(theResourceType + "/" + theResourceId);
} else {
id = newIdType(theResourceId);
}
IdDt id = new IdDt(theResourceType, resourceIdToUse);
Map<String, List<IResourceLookup<JpaPid>>> matches =
translateForcedIdToPids(theRequestPartitionId, Collections.singletonList(id), theExcludeDeleted);
List<IIdType> ids = List.of(id);
Map<IIdType, IResourceLookup<JpaPid>> outcome = resolveResourceIdentities(theRequestPartitionId, ids, theMode);
// We only pass 1 input in so only 0..1 will come back
if (matches.isEmpty() || !matches.containsKey(resourceIdToUse)) {
if (!outcome.containsKey(id)) {
throw new ResourceNotFoundException(Msg.code(2001) + "Resource " + id + " is not known");
}
if (matches.size() > 1 || matches.get(resourceIdToUse).size() > 1) {
/*
* This means that:
* 1. There are two resources with the exact same resource type and forced id
* 2. The unique constraint on this column-pair has been dropped
*/
String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
throw new PreconditionFailedException(Msg.code(1099) + msg);
return outcome.get(id);
}
@Nonnull
@Override
public Map<IIdType, IResourceLookup<JpaPid>> resolveResourceIdentities(
@Nonnull RequestPartitionId theRequestPartitionId,
Collection<IIdType> theIds,
ResolveIdentityMode theMode) {
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive()
: "no transaction active";
if (theIds.isEmpty()) {
return new HashMap<>();
}
return matches.get(resourceIdToUse).get(0);
Collection<IIdType> ids = new ArrayList<>(theIds);
ids.forEach(id -> Validate.isTrue(id.hasIdPart()));
RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);
ListMultimap<IIdType, IResourceLookup<JpaPid>> idToLookup =
MultimapBuilder.hashKeys(theIds.size()).arrayListValues(1).build();
// Do we have any FHIR ID lookups cached for any of the IDs
if (theMode.isUseCache(myStorageSettings.isDeleteEnabled()) && !ids.isEmpty()) {
resolveResourceIdentitiesForFhirIdsUsingCache(requestPartitionId, theMode, ids, idToLookup);
}
// We still haven't found IDs, let's look them up in the DB
if (!ids.isEmpty()) {
resolveResourceIdentitiesForFhirIdsUsingDatabase(requestPartitionId, ids, idToLookup);
}
// Convert the multimap into a simple map
Map<IIdType, IResourceLookup<JpaPid>> retVal = new HashMap<>();
for (Map.Entry<IIdType, IResourceLookup<JpaPid>> next : idToLookup.entries()) {
if (next.getValue().getDeleted() != null) {
if (theMode.isFailOnDeleted()) {
String msg = myFhirCtx
.getLocalizer()
.getMessageSanitized(
IdHelperService.class,
"deletedId",
next.getKey().getValue());
throw new ResourceGoneException(Msg.code(2572) + msg);
}
if (!theMode.isIncludeDeleted()) {
continue;
}
}
IResourceLookup previousValue = retVal.put(next.getKey(), next.getValue());
if (previousValue != null) {
/*
* This means that either:
* 1. There are two resources with the exact same resource type and forced
* id. The most likely reason for that is that someone is performing a
* multi-partition search and there are resources on each partition
* with the same ID.
* 2. The unique constraint on the FHIR_ID column has been dropped
*/
ourLog.warn(
"Resource ID[{}] corresponds to lookups: {} and {}",
next.getKey(),
previousValue,
next.getValue());
String msg = myFhirCtx.getLocalizer().getMessage(IdHelperService.class, "nonUniqueForcedId");
throw new PreconditionFailedException(Msg.code(1099) + msg);
}
}
return retVal;
}
/**
* Returns a mapping of Id -> IResourcePersistentId.
* If any resource is not found, it will throw ResourceNotFound exception (and no map will be returned)
* Fetch the resource identity ({@link IResourceLookup}) for a collection of
* resource IDs from the internal memory cache if possible. Note that we only
* use cached results if deletes are disabled on the server (since it is
* therefore not possible that we have an entry in the cache that has since
* been deleted but the cache doesn't know about the deletion), or if we
* aren't excluding deleted results anyhow.
*
* @param theRequestPartitionId The partition(s) to search
* @param theIdsToResolve The IDs we should look up. Any IDs that are resolved
* will be removed from this list. Any IDs remaining in
* the list after calling this method still haven't
* been attempted to be resolved.
* @param theMapToPopulate The results will be populated into this map
*/
@Override
@Nonnull
public Map<String, JpaPid> resolveResourcePersistentIds(
@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, List<String> theIds) {
return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theIds, false);
/**
 * Attempts to resolve resource identities ({@link IResourceLookup}) for the given
 * FHIR IDs using the in-memory forced-ID cache.
 * <p>
 * IDs that are successfully resolved from the cache are removed from
 * {@code theIdsToResolve}; any IDs remaining afterwards still need to be resolved
 * (e.g. against the database by the caller). Cached lookups for deleted resources
 * are only emitted when {@code theMode} includes deleted resources.
 *
 * @param theRequestPartitionId the partition(s) being searched (part of the cache key)
 * @param theMode               controls cache usability and deleted-resource handling
 * @param theIdsToResolve       the IDs to look up; resolved entries are removed in place
 * @param theMapToPopulate      receives one entry per cached lookup found
 */
private void resolveResourceIdentitiesForFhirIdsUsingCache(
		@Nonnull RequestPartitionId theRequestPartitionId,
		ResolveIdentityMode theMode,
		Collection<IIdType> theIdsToResolve,
		ListMultimap<IIdType, IResourceLookup<JpaPid>> theMapToPopulate) {
	// The cache-usability decision does not depend on the loop variable, so make it
	// once up front instead of re-evaluating it (and building a cache key) per ID
	if (!theMode.isUseCache(myStorageSettings.isDeleteEnabled())) {
		return;
	}
	for (Iterator<IIdType> idIterator = theIdsToResolve.iterator(); idIterator.hasNext(); ) {
		IIdType nextForcedId = idIterator.next();
		MemoryCacheService.ForcedIdCacheKey nextKey = new MemoryCacheService.ForcedIdCacheKey(
				nextForcedId.getResourceType(), nextForcedId.getIdPart(), theRequestPartitionId);
		List<IResourceLookup<JpaPid>> cachedLookups = myMemoryCacheService.getIfPresent(
				MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey);
		if (cachedLookups != null && !cachedLookups.isEmpty()) {
			// Resolved from cache: remove so the caller won't also query the DB for it
			idIterator.remove();
			IIdType cacheId = nextKey.toIdType(myFhirCtx);
			for (IResourceLookup<JpaPid> cachedLookup : cachedLookups) {
				// Only surface deleted entries if the resolve mode asks for them
				if (theMode.isIncludeDeleted() || cachedLookup.getDeleted() == null) {
					theMapToPopulate.put(cacheId, cachedLookup);
				}
			}
		}
	}
}
/**
 * Fetch the resource identity ({@link IResourceLookup}) for a collection of
 * resource IDs from the database. Results are added to {@code theMapToPopulate},
 * and are also stored in the in-memory forced-ID cache after the transaction commits.
 *
 * @param theRequestPartitionId The partition(s) to search
 * @param theIdsToResolve The IDs we should look up
 * @param theMapToPopulate The results will be populated into this map
 */
private void resolveResourceIdentitiesForFhirIdsUsingDatabase(
		RequestPartitionId theRequestPartitionId,
		Collection<IIdType> theIdsToResolve,
		ListMultimap<IIdType, IResourceLookup<JpaPid>> theMapToPopulate) {

	/*
	 * If we have more than a threshold of IDs, we need to chunk the execution to
	 * avoid having too many parameters in one SQL statement
	 */
	int maxPageSize = (SearchBuilder.getMaximumPageSize() / 2) - 10;
	if (theIdsToResolve.size() > maxPageSize) {
		// Recurse once per chunk - each recursive call is guaranteed below the threshold
		TaskChunker.chunk(
				theIdsToResolve,
				maxPageSize,
				chunk -> resolveResourceIdentitiesForFhirIdsUsingDatabase(
						theRequestPartitionId, chunk, theMapToPopulate));
		return;
	}

	// Select the lookup columns (PID, resource type, FHIR ID, deleted timestamp, partition ID)
	CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Tuple> criteriaQuery = cb.createTupleQuery();
	Root<ResourceTable> from = criteriaQuery.from(ResourceTable.class);
	criteriaQuery.multiselect(
			from.get("myId"),
			from.get("myResourceType"),
			from.get("myFhirId"),
			from.get("myDeleted"),
			from.get("myPartitionIdValue"));

	List<Predicate> outerAndPredicates = new ArrayList<>(2);
	if (!theRequestPartitionId.isAllPartitions()) {
		getOptionalPartitionPredicate(theRequestPartitionId, cb, from).ifPresent(outerAndPredicates::add);
	}

	// Create one clause per ID; the clauses are OR'd together below
	List<Predicate> innerIdPredicates = new ArrayList<>(theIdsToResolve.size());
	boolean haveUntypedIds = false;
	for (IIdType next : theIdsToResolve) {
		if (!next.hasResourceType()) {
			haveUntypedIds = true;
		}

		List<Predicate> idPredicates = new ArrayList<>(2);

		if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
				&& next.isIdPartValidLong()) {
			// Under the ALPHANUMERIC strategy a purely-numeric ID can only be a
			// server-assigned PID, so match directly on the PID column
			Predicate typeCriteria = cb.equal(from.get("myId"), next.getIdPartAsLong());
			idPredicates.add(typeCriteria);
		} else {
			// Otherwise match on the FHIR_ID column (plus resource type, when present)
			if (isNotBlank(next.getResourceType())) {
				Predicate typeCriteria = cb.equal(from.get("myResourceType"), next.getResourceType());
				idPredicates.add(typeCriteria);
			}
			Predicate idCriteria = cb.equal(from.get("myFhirId"), next.getIdPart());
			idPredicates.add(idCriteria);
		}

		innerIdPredicates.add(cb.and(idPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
	}
	outerAndPredicates.add(cb.or(innerIdPredicates.toArray(EMPTY_PREDICATE_ARRAY)));

	criteriaQuery.where(cb.and(outerAndPredicates.toArray(EMPTY_PREDICATE_ARRAY)));
	TypedQuery<Tuple> query = myEntityManager.createQuery(criteriaQuery);
	List<Tuple> results = query.getResultList();
	for (Tuple nextId : results) {
		// Check if the nextId has a resource ID. It may have a null resource ID if a commit is still pending.
		Long resourcePid = nextId.get(0, Long.class);
		String resourceType = nextId.get(1, String.class);
		String fhirId = nextId.get(2, String.class);
		Date deletedAd = nextId.get(3, Date.class);
		Integer partitionId = nextId.get(4, Integer.class);
		if (resourcePid != null) {
			JpaResourceLookup lookup = new JpaResourceLookup(
					resourceType, resourcePid, deletedAd, PartitionablePartitionId.with(partitionId, null));

			MemoryCacheService.ForcedIdCacheKey nextKey =
					new MemoryCacheService.ForcedIdCacheKey(resourceType, fhirId, theRequestPartitionId);
			IIdType id = nextKey.toIdType(myFhirCtx);
			theMapToPopulate.put(id, lookup);

			if (haveUntypedIds) {
				// Also register the untyped form of the ID so that untyped inputs
				// can be matched against this result
				id = nextKey.toIdTypeWithoutResourceType(myFhirCtx);
				theMapToPopulate.put(id, lookup);
			}

			// Cache everything we now know for this key (applied after commit)
			List<IResourceLookup<JpaPid>> valueToCache = theMapToPopulate.get(id);
			myMemoryCacheService.putAfterCommit(
					MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, nextKey, valueToCache);
		}
	}
}
/**
@ -208,7 +387,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
@Nonnull RequestPartitionId theRequestPartitionId,
String theResourceType,
List<String> theIds,
boolean theExcludeDeleted) {
ResolveIdentityMode theMode) {
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
Validate.notNull(theIds, "theIds cannot be null");
Validate.isTrue(!theIds.isEmpty(), "theIds must not be empty");
@ -224,7 +403,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
// is a forced id
// we must resolve!
if (myStorageSettings.isDeleteEnabled()) {
retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theExcludeDeleted)
retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, id, theMode)
.getPersistentId();
retVals.put(id, retVal);
} else {
@ -249,18 +428,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return retVals;
}
/**
 * Given a resource type and ID, determines the internal persistent ID for the resource.
 * <p>
 * This overload does not exclude deleted resources: it delegates to the
 * four-argument overload with {@code theExcludeDeleted = false}.
 *
 * @param theRequestPartitionId the partition(s) to search
 * @param theResourceType       the resource type (e.g. {@code Patient})
 * @param theId                 the resource ID to resolve
 * @throws ResourceNotFoundException If the ID can not be found
 */
@Override
@Nonnull
public JpaPid resolveResourcePersistentIds(
		@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
	return resolveResourcePersistentIds(theRequestPartitionId, theResourceType, theId, false);
}
/**
* Given a resource type and ID, determines the internal persistent ID for the resource.
* Optionally filters out deleted resources.
@ -273,11 +440,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
@Nonnull RequestPartitionId theRequestPartitionId,
String theResourceType,
String theId,
boolean theExcludeDeleted) {
ResolveIdentityMode theMode) {
Validate.notNull(theId, "theId must not be null");
Map<String, JpaPid> retVal = resolveResourcePersistentIds(
theRequestPartitionId, theResourceType, Collections.singletonList(theId), theExcludeDeleted);
theRequestPartitionId, theResourceType, Collections.singletonList(theId), theMode);
return retVal.get(theId); // should be only one
}
@ -359,11 +526,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
idsToCheck.add(nextId);
}
new QueryChunker<IIdType>()
.chunk(
idsToCheck,
SearchBuilder.getMaximumPageSize() / 2,
ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
new QueryChunker<IIdType>();
TaskChunker.chunk(
idsToCheck,
SearchBuilder.getMaximumPageSize() / 2,
ids -> doResolvePersistentIds(theRequestPartitionId, ids, retVal));
}
return retVal;
@ -430,18 +597,30 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
RequestPartitionId theRequestPartitionId, CriteriaBuilder cb, Root<ResourceTable> from) {
if (myPartitionSettings.isAllowUnqualifiedCrossPartitionReference()) {
return Optional.empty();
} else if (theRequestPartitionId.isDefaultPartition() && myPartitionSettings.getDefaultPartitionId() == null) {
Predicate partitionIdCriteria = cb.isNull(from.get("myPartitionIdValue"));
return Optional.of(partitionIdCriteria);
} else if (!theRequestPartitionId.isAllPartitions()) {
} else if (theRequestPartitionId.isAllPartitions()) {
return Optional.empty();
} else {
List<Integer> partitionIds = theRequestPartitionId.getPartitionIds();
partitionIds = replaceDefaultPartitionIdIfNonNull(myPartitionSettings, partitionIds);
if (partitionIds.size() > 1) {
Predicate partitionIdCriteria = from.get("myPartitionIdValue").in(partitionIds);
return Optional.of(partitionIdCriteria);
} else if (partitionIds.size() == 1) {
Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0));
return Optional.of(partitionIdCriteria);
if (partitionIds.contains(null)) {
Predicate partitionIdNullCriteria =
from.get("myPartitionIdValue").isNull();
if (partitionIds.size() == 1) {
return Optional.of(partitionIdNullCriteria);
} else {
Predicate partitionIdCriteria = from.get("myPartitionIdValue")
.in(partitionIds.stream().filter(t -> t != null).collect(Collectors.toList()));
return Optional.of(cb.or(partitionIdCriteria, partitionIdNullCriteria));
}
} else {
if (partitionIds.size() > 1) {
Predicate partitionIdCriteria =
from.get("myPartitionIdValue").in(partitionIds);
return Optional.of(partitionIdCriteria);
} else if (partitionIds.size() == 1) {
Predicate partitionIdCriteria = cb.equal(from.get("myPartitionIdValue"), partitionIds.get(0));
return Optional.of(partitionIdCriteria);
}
}
}
return Optional.empty();
@ -475,6 +654,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return retVal;
}
@SuppressWarnings("OptionalAssignedToNull")
@Override
public Optional<String> translatePidIdToForcedIdWithCache(JpaPid theId) {
// do getIfPresent and then put to avoid doing I/O inside the cache.
@ -492,112 +672,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return forcedId;
}
/**
 * Groups the ID parts of the given IDs by their resource type, returning a multimap
 * of resource type to ID part. IDs without a resource type are grouped under the
 * empty-string key. When the client ID strategy is not {@code ANY}, purely numeric
 * IDs are skipped (they are treated as server-assigned PIDs, not forced IDs).
 *
 * @param theIds the IDs to organize
 * @return multimap of resource type (or {@code ""}) to ID part
 */
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
	ListMultimap<String, String> retVal =
			MultimapBuilder.hashKeys().arrayListValues().build();
	for (IIdType nextId : theIds) {
		boolean treatAsForcedId = myStorageSettings.getResourceClientIdStrategy()
						== JpaStorageSettings.ClientIdStrategyEnum.ANY
				|| !isValidPid(nextId);
		if (!treatAsForcedId) {
			continue;
		}
		String resourceType = nextId.hasResourceType() ? nextId.getResourceType() : "";
		retVal.put(resourceType, nextId.getIdPart());
	}
	return retVal;
}
/**
 * Resolves the given IDs to their {@link IResourceLookup} entries, returning a map
 * keyed on the ID part (the forced ID, or the PID rendered as a string).
 * <p>
 * Purely numeric IDs are resolved directly as PIDs via {@link #resolvePids}, unless
 * the client ID strategy is {@code ANY} (in which case numeric IDs may be
 * client-assigned). When deletes are disabled on this server, the in-memory
 * {@code RESOURCE_LOOKUP} cache is consulted first and database results are cached
 * after commit (cached entries can not become stale in that configuration).
 *
 * @param theRequestPartitionId the partition(s) to search
 * @param theId                 the IDs to resolve; each must have an ID part
 * @param theExcludeDeleted     whether the DAO queries should exclude deleted resources
 * @return map of ID part to the lookups found for it (possibly more than one per key)
 */
private Map<String, List<IResourceLookup<JpaPid>>> translateForcedIdToPids(
		@Nonnull RequestPartitionId theRequestPartitionId, Collection<IIdType> theId, boolean theExcludeDeleted) {
	theId.forEach(id -> Validate.isTrue(id.hasIdPart()));

	if (theId.isEmpty()) {
		return new HashMap<>();
	}

	Map<String, List<IResourceLookup<JpaPid>>> retVal = new HashMap<>();
	RequestPartitionId requestPartitionId = replaceDefault(theRequestPartitionId);

	// Numeric IDs can be looked up directly as PIDs, but only when clients
	// are not permitted to assign numeric IDs themselves
	if (myStorageSettings.getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
		List<Long> pids = theId.stream()
				.filter(t -> isValidPid(t))
				.map(IIdType::getIdPartAsLong)
				.collect(Collectors.toList());
		if (!pids.isEmpty()) {
			resolvePids(requestPartitionId, pids, retVal);
		}
	}

	// returns a map of resourcetype->id
	ListMultimap<String, String> typeToIds = organizeIdsByResourceType(theId);
	for (Map.Entry<String, Collection<String>> nextEntry : typeToIds.asMap().entrySet()) {
		String nextResourceType = nextEntry.getKey();
		Collection<String> nextIds = nextEntry.getValue();

		// With deletes disabled, cached lookups can never be stale, so try the
		// cache first and drop any IDs it satisfies from the DB query below
		if (!myStorageSettings.isDeleteEnabled()) {
			for (Iterator<String> forcedIdIterator = nextIds.iterator(); forcedIdIterator.hasNext(); ) {
				String nextForcedId = forcedIdIterator.next();
				String nextKey = nextResourceType + "/" + nextForcedId;
				IResourceLookup<JpaPid> cachedLookup =
						myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
				if (cachedLookup != null) {
					forcedIdIterator.remove();
					retVal.computeIfAbsent(nextForcedId, id -> new ArrayList<>())
							.add(cachedLookup);
				}
			}
		}

		if (!nextIds.isEmpty()) {
			Collection<Object[]> views;
			assert isNotBlank(nextResourceType);

			// Pick the DAO query matching the partition selection
			if (requestPartitionId.isAllPartitions()) {
				views = myResourceTableDao.findAndResolveByForcedIdWithNoType(
						nextResourceType, nextIds, theExcludeDeleted);
			} else {
				if (requestPartitionId.isDefaultPartition()) {
					views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(
							nextResourceType, nextIds, theExcludeDeleted);
				} else if (requestPartitionId.hasDefaultPartitionId()) {
					views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartitionIdOrNullPartitionId(
							nextResourceType,
							nextIds,
							requestPartitionId.getPartitionIdsWithoutDefault(),
							theExcludeDeleted);
				} else {
					views = myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartition(
							nextResourceType, nextIds, requestPartitionId.getPartitionIds(), theExcludeDeleted);
				}
			}

			// Each row: [resourceType, pid, forcedId, deletedAt, partitionId, partitionDate]
			for (Object[] next : views) {
				String resourceType = (String) next[0];
				Long resourcePid = (Long) next[1];
				String forcedId = (String) next[2];
				Date deletedAt = (Date) next[3];
				Integer partitionId = (Integer) next[4];
				LocalDate partitionDate = (LocalDate) next[5];
				JpaResourceLookup lookup = new JpaResourceLookup(
						resourceType,
						resourcePid,
						deletedAt,
						PartitionablePartitionId.with(partitionId, partitionDate));
				retVal.computeIfAbsent(forcedId, id -> new ArrayList<>()).add(lookup);

				if (!myStorageSettings.isDeleteEnabled()) {
					String key = resourceType + "/" + forcedId;
					myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, key, lookup);
				}
			}
		}
	}

	return retVal;
}
public RequestPartitionId replaceDefault(RequestPartitionId theRequestPartitionId) {
if (myPartitionSettings.getDefaultPartitionId() != null) {
if (!theRequestPartitionId.isAllPartitions() && theRequestPartitionId.hasDefaultPartitionId()) {
@ -610,59 +684,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
return theRequestPartitionId;
}
/**
 * Resolves the given numeric resource PIDs to {@link IResourceLookup} objects and
 * adds them to {@code theTargets}, keyed on the string form of the PID.
 * <p>
 * When deletes are disabled on this server, the in-memory {@code RESOURCE_LOOKUP}
 * cache is consulted first (its entries can never be stale in that configuration);
 * PIDs satisfied by the cache are removed from {@code thePidsToResolve}. Lookups
 * freshly loaded from the database are added to the cache after commit.
 *
 * @param theRequestPartitionId the partition(s) to search
 * @param thePidsToResolve      the PIDs to look up; cache hits are removed in place
 * @param theTargets            the map to populate with resolved lookups
 */
private void resolvePids(
		@Nonnull RequestPartitionId theRequestPartitionId,
		List<Long> thePidsToResolve,
		Map<String, List<IResourceLookup<JpaPid>>> theTargets) {
	if (!myStorageSettings.isDeleteEnabled()) {
		for (Iterator<Long> forcedIdIterator = thePidsToResolve.iterator(); forcedIdIterator.hasNext(); ) {
			Long nextPid = forcedIdIterator.next();
			String nextKey = Long.toString(nextPid);
			IResourceLookup<JpaPid> cachedLookup =
					myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey);
			if (cachedLookup != null) {
				forcedIdIterator.remove();
				theTargets.computeIfAbsent(nextKey, id -> new ArrayList<>()).add(cachedLookup);
			}
		}
	}

	if (!thePidsToResolve.isEmpty()) {
		Collection<Object[]> lookup;
		// Pick the DAO query matching the partition selection
		if (theRequestPartitionId.isAllPartitions()) {
			lookup = myResourceTableDao.findLookupFieldsByResourcePid(thePidsToResolve);
		} else {
			if (theRequestPartitionId.isDefaultPartition()) {
				lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionNull(thePidsToResolve);
			} else if (theRequestPartitionId.hasDefaultPartitionId()) {
				lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIdsOrNullPartition(
						thePidsToResolve, theRequestPartitionId.getPartitionIdsWithoutDefault());
			} else {
				lookup = myResourceTableDao.findLookupFieldsByResourcePidInPartitionIds(
						thePidsToResolve, theRequestPartitionId.getPartitionIds());
			}
		}
		// Each row: [resourceType, pid, deletedAt, partitionId, partitionDate]
		lookup.stream()
				.map(t -> new JpaResourceLookup(
						(String) t[0],
						(Long) t[1],
						(Date) t[2],
						PartitionablePartitionId.with((Integer) t[3], (LocalDate) t[4])))
				.forEach(t -> {
					// computeIfAbsent keeps this consistent with the cache branch above
					// and avoids the containsKey/put/get triple lookup
					String id = t.getPersistentId().toString();
					theTargets.computeIfAbsent(id, key -> new ArrayList<>()).add(t);
					if (!myStorageSettings.isDeleteEnabled()) {
						// Safe to cache: with deletes disabled this mapping can not go stale
						myMemoryCacheService.putAfterCommit(
								MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, id, t);
					}
				});
	}
}
@Override
public PersistentIdToForcedIdMap<JpaPid> translatePidsToForcedIds(Set<JpaPid> theResourceIds) {
assert myDontCheckActiveTransactionForUnitTest || TransactionSynchronizationManager.isSynchronizationActive();
@ -673,7 +694,7 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
List<Long> remainingPids =
thePids.stream().filter(t -> !retVal.containsKey(t)).collect(Collectors.toList());
new QueryChunker<Long>().chunk(remainingPids, t -> {
QueryChunker.chunk(remainingPids, t -> {
List<ResourceTable> resourceEntities = myResourceTableDao.findAllById(t);
for (ResourceTable nextResourceEntity : resourceEntities) {
@ -701,33 +722,39 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
* Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
*/
@Override
public void addResolvedPidToForcedId(
JpaPid theJpaPid,
public void addResolvedPidToFhirId(
@Nonnull JpaPid theJpaPid,
@Nonnull RequestPartitionId theRequestPartitionId,
String theResourceType,
@Nullable String theForcedId,
@Nonnull String theResourceType,
@Nonnull String theFhirId,
@Nullable Date theDeletedAt) {
if (theForcedId != null) {
if (theJpaPid.getAssociatedResourceId() == null) {
populateAssociatedResourceId(theResourceType, theForcedId, theJpaPid);
}
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
theJpaPid.getId(),
Optional.of(theResourceType + "/" + theForcedId));
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);
} else {
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theJpaPid.getId(), Optional.empty());
if (theJpaPid.getAssociatedResourceId() == null) {
populateAssociatedResourceId(theResourceType, theFhirId, theJpaPid);
}
if (!myStorageSettings.isDeleteEnabled()) {
JpaResourceLookup lookup = new JpaResourceLookup(
theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
String nextKey = theJpaPid.toString();
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.RESOURCE_LOOKUP, nextKey, lookup);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.PID_TO_FORCED_ID,
theJpaPid.getId(),
Optional.of(theResourceType + "/" + theFhirId));
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theFhirId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theJpaPid);
JpaResourceLookup lookup = new JpaResourceLookup(
theResourceType, theJpaPid.getId(), theDeletedAt, theJpaPid.getPartitionablePartitionId());
MemoryCacheService.ForcedIdCacheKey fhirIdKey =
new MemoryCacheService.ForcedIdCacheKey(theResourceType, theFhirId, theRequestPartitionId);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKey, List.of(lookup));
// If it's a pure-numeric ID, store it in the cache without a type as well
// so that we can resolve it this way when loading entities for update
if (myStorageSettings.getResourceClientIdStrategy() == JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC
&& isValidLong(theFhirId)) {
MemoryCacheService.ForcedIdCacheKey fhirIdKeyWithoutType =
new MemoryCacheService.ForcedIdCacheKey(null, theFhirId, theRequestPartitionId);
myMemoryCacheService.putAfterCommit(
MemoryCacheService.CacheEnum.RESOURCE_LOOKUP_BY_FORCED_ID, fhirIdKeyWithoutType, List.of(lookup));
}
}
@ -736,19 +763,6 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
myPartitionSettings = thePartitionSettings;
}
public static boolean isValidPid(IIdType theId) {
if (theId == null) {
return false;
}
String idPart = theId.getIdPart();
return isValidPid(idPart);
}
public static boolean isValidPid(String theIdPart) {
return StringUtils.isNumeric(theIdPart);
}
@Override
@Nonnull
public List<JpaPid> getPidsOrThrowException(
@ -764,7 +778,11 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
if (resourceId == null) {
IIdType id = theResource.getIdElement();
try {
retVal = resolveResourcePersistentIds(theRequestPartitionId, id.getResourceType(), id.getIdPart());
retVal = resolveResourceIdentityPid(
theRequestPartitionId,
id.getResourceType(),
id.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
} catch (ResourceNotFoundException e) {
retVal = null;
}
@ -836,4 +854,23 @@ public class IdHelperService implements IIdHelperService<JpaPid> {
public JpaPid newPidFromStringIdAndResourceName(String thePid, String theResourceName) {
return JpaPid.fromIdAndResourceType(Long.parseLong(thePid), theResourceName);
}
/**
 * Creates a new {@link IIdType} instance appropriate for the configured FHIR
 * version and initializes it with the supplied value.
 *
 * @param theValue the raw ID string to wrap (passed through unmodified)
 * @return a freshly constructed, version-specific ID holding {@code theValue}
 */
private IIdType newIdType(String theValue) {
	// The FhirContext knows which concrete IdType implementation matches
	// the active FHIR version, so delegate construction to it.
	IIdType id = myFhirCtx.getVersion().newIdType();
	id.setValue(theValue);
	return id;
}
/**
 * Returns {@code true} if the given ID's unqualified ID part is a purely
 * numeric string (and therefore usable as a native persistent ID).
 *
 * @param theId the ID to inspect; {@code null} is tolerated and yields {@code false}
 */
public static boolean isValidPid(IIdType theId) {
	// Short-circuit on null, then defer to the String overload for the digit check.
	return theId != null && isValidPid(theId.getIdPart());
}
/**
 * Returns {@code true} if the given string is non-null, non-empty, and
 * consists entirely of Unicode digit characters — i.e. it can be treated
 * as a native numeric persistent ID.
 *
 * @param theIdPart the unqualified ID part to test; may be {@code null}
 */
public static boolean isValidPid(String theIdPart) {
	// Equivalent to StringUtils.isNumeric(..): null/empty -> false,
	// otherwise every character must be a Unicode digit.
	if (theIdPart == null || theIdPart.isEmpty()) {
		return false;
	}
	for (int i = 0; i < theIdPart.length(); i++) {
		if (!Character.isDigit(theIdPart.charAt(i))) {
			return false;
		}
	}
	return true;
}
}

View File

@ -267,13 +267,14 @@ public class TermValueSet implements Serializable {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("id", myId)
.append("url", myUrl)
.append(myResource != null ? ("resource=" + myResource.toString()) : ("resource=(null)"))
.append("version", myVersion)
.append("resourcePid", myResourcePid)
.append("name", myName)
.append(myConcepts != null ? ("concepts - size=" + myConcepts.size()) : ("concepts=(null)"))
.append(myConcepts != null ? ("conceptCount=" + myConcepts.size()) : ("concepts=(null)"))
.append("totalConcepts", myTotalConcepts)
.append("totalConceptDesignations", myTotalConceptDesignations)
.append("expansionStatus", myExpansionStatus)
.append(myResource != null ? ("resId=" + myResource) : ("resource=(null)"))
.toString();
}
}

View File

@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.model.cross;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import java.util.Date;
@ -59,4 +61,14 @@ public class JpaResourceLookup implements IResourceLookup<JpaPid> {
return jpaPid;
}
@Override
public String toString() {
	// Render a compact single-line summary (type, PID, deletion timestamp,
	// partition) for logging and debugging.
	ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
	builder.append("resType", myResourceType);
	builder.append("resPid", myResourcePid);
	builder.append("deletedAt", myDeletedAt);
	builder.append("partId", myPartitionablePartitionId);
	return builder.toString();
}
}

View File

@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
@ -49,6 +50,7 @@ import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
@ -84,6 +86,7 @@ import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
@ -117,6 +120,7 @@ import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -533,10 +537,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
throw new InvalidRequestException(Msg.code(2027)
+ "LastN operation is not enabled on this service, can not process this request");
}
return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
.map(lastNResourceId -> myIdHelperService.resolveResourcePersistentIds(
myRequestPartitionId, myResourceName, String.valueOf(lastNResourceId)))
.collect(Collectors.toList());
List<IResourcePersistentId> persistentIds = myFulltextSearchSvc.lastN(myParams, theMaximumResults);
return persistentIds.stream().map(t -> (JpaPid) t).collect(Collectors.toList());
} else {
throw new InvalidRequestException(
Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
@ -557,7 +559,13 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
idParamValue = idParm.getValue();
}
pid = myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, idParamValue);
pid = myIdHelperService
.resolveResourceIdentity(
myRequestPartitionId,
myResourceName,
idParamValue,
ResolveIdentityMode.includeDeleted().cacheOk())
.getPersistentId();
}
return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
}
@ -579,37 +587,46 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
/**
* Combs through the params for any _id parameters and extracts the PIDs for them
*
* @param theTargetPids
*/
private void extractTargetPidsFromIdParams(Set<Long> theTargetPids) {
// get all the IQueryParameterType objects
// for _id -> these should all be StringParam values
HashSet<String> ids = new HashSet<>();
HashSet<IIdType> ids = new HashSet<>();
List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
for (List<IQueryParameterType> paramList : params) {
for (IQueryParameterType param : paramList) {
String id;
if (param instanceof StringParam) {
// we expect all _id values to be StringParams
ids.add(((StringParam) param).getValue());
id = ((StringParam) param).getValue();
} else if (param instanceof TokenParam) {
ids.add(((TokenParam) param).getValue());
id = ((TokenParam) param).getValue();
} else {
// we do not expect the _id parameter to be a non-string value
throw new IllegalArgumentException(
Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
}
IIdType idType = myContext.getVersion().newIdType();
if (id.contains("/")) {
idType.setValue(id);
} else {
idType.setValue(myResourceName + "/" + id);
}
ids.add(idType);
}
}
// fetch our target Pids
// this will throw if an id is not found
Map<String, JpaPid> idToPid = myIdHelperService.resolveResourcePersistentIds(
myRequestPartitionId, myResourceName, new ArrayList<>(ids));
Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
myRequestPartitionId,
new ArrayList<>(ids),
ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());
// add the pids to targetPids
for (JpaPid pid : idToPid.values()) {
theTargetPids.add(pid.getId());
for (IResourceLookup pid : idToIdentity.values()) {
theTargetPids.add((Long) pid.getPersistentId().getId());
}
}

View File

@ -21,24 +21,27 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import jakarta.annotation.Nullable;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
@ -68,9 +71,8 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
Set<JpaPid> allOrPids = null;
SearchFilterParser.CompareOperation defaultOperation = SearchFilterParser.CompareOperation.eq;
boolean allIdsAreForcedIds = true;
for (List<? extends IQueryParameterType> nextValue : theValues) {
Set<JpaPid> orPids = new HashSet<>();
Set<IIdType> ids = new LinkedHashSet<>();
boolean haveValue = false;
for (IQueryParameterType next : nextValue) {
String value = next.getValueAsQueryToken(getFhirContext());
@ -78,21 +80,14 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
value = value.substring(1);
}
IdType valueAsId = new IdType(value);
if (isNotBlank(value)) {
if (!myIdHelperService.idRequiresForcedId(valueAsId.getIdPart()) && allIdsAreForcedIds) {
allIdsAreForcedIds = false;
}
haveValue = true;
try {
boolean excludeDeleted = true;
JpaPid pid = myIdHelperService.resolveResourcePersistentIds(
theRequestPartitionId, theResourceName, valueAsId.getIdPart(), excludeDeleted);
orPids.add(pid);
} catch (ResourceNotFoundException e) {
// This is not an error in a search, it just results in no matches
ourLog.debug("Resource ID {} was requested but does not exist", valueAsId.getIdPart());
if (!value.contains("/")) {
value = theResourceName + "/" + value;
}
IIdType id = getFhirContext().getVersion().newIdType();
id.setValue(value);
ids.add(id);
}
if (next instanceof TokenParam) {
@ -101,6 +96,20 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
}
}
}
Set<JpaPid> orPids = new HashSet<>();
// We're joining this to a query that will explicitly ask for non-deleted,
// so we really only want the PID and can safely cache (even if a previously
// deleted status was cached, since it might now be undeleted)
Map<IIdType, IResourceLookup<JpaPid>> resolvedPids = myIdHelperService.resolveResourceIdentities(
theRequestPartitionId,
ids,
ResolveIdentityMode.includeDeleted().cacheOk());
for (IResourceLookup<JpaPid> lookup : resolvedPids.values()) {
orPids.add(lookup.getPersistentId());
}
if (haveValue) {
if (allOrPids == null) {
allOrPids = orPids;
@ -122,17 +131,19 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
List<Long> resourceIds = JpaPid.toLongList(allOrPids);
if (theSourceJoinColumn == null) {
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(!allIdsAreForcedIds);
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true);
Condition predicate;
switch (operation) {
default:
case eq:
predicate = queryRootTable.createPredicateResourceIds(false, resourceIds);
return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
break;
case ne:
predicate = queryRootTable.createPredicateResourceIds(true, resourceIds);
return queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
break;
}
predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
return predicate;
} else {
DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn);
return QueryParameterUtils.toEqualToOrInPredicate(

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
@ -382,8 +383,11 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
// Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed
IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource, theRequest);
JpaPid codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(
RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart());
JpaPid codeSystemResourcePid = myIdHelperService.resolveResourceIdentityPid(
RequestPartitionId.allPartitions(),
csId.getResourceType(),
csId.getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getId());
ourLog.info("CodeSystem resource has ID: {}", csId.getValue());

View File

@ -34,6 +34,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.config.util.ConnectionPoolInfoProvider;
import ca.uhn.fhir.jpa.config.util.IConnectionPoolInfoProvider;
@ -470,7 +471,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
myCodeSystemCurrentVersionCache.invalidateAll();
}
public void deleteValueSetForResource(ResourceTable theResourceTable) {
public Optional<TermValueSet> deleteValueSetForResource(ResourceTable theResourceTable) {
// Get existing entity so it can be deleted.
Optional<TermValueSet> optionalExistingTermValueSetById =
myTermValueSetDao.findByResourcePid(theResourceTable.getId());
@ -481,8 +482,19 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
ourLog.info("Deleting existing TermValueSet[{}] and its children...", existingTermValueSet.getId());
deletePreCalculatedValueSetContents(existingTermValueSet);
myTermValueSetDao.deleteById(existingTermValueSet.getId());
/*
* If we're updating an existing ValueSet within a transaction, we need to make
* sure to manually flush now since otherwise we'll try to create a new
* TermValueSet entity and fail with a constraint error on the URL, since
* this one won't be deleted yet
*/
myTermValueSetDao.flush();
ourLog.info("Done deleting existing TermValueSet[{}] and its children.", existingTermValueSet.getId());
}
return optionalExistingTermValueSetById;
}
private void deletePreCalculatedValueSetContents(TermValueSet theValueSet) {
@ -2081,10 +2093,11 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}
private JpaPid getValueSetResourcePersistentId(ValueSet theValueSet) {
return myIdHelperService.resolveResourcePersistentIds(
return myIdHelperService.resolveResourceIdentityPid(
RequestPartitionId.allPartitions(),
theValueSet.getIdElement().getResourceType(),
theValueSet.getIdElement().getIdPart());
theValueSet.getIdElement().getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
}
protected IValidationSupport.CodeValidationResult validateCodeIsInPreExpandedValueSet(
@ -2527,6 +2540,13 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
@Override
@Transactional
public void storeTermValueSet(ResourceTable theResourceTable, ValueSet theValueSet) {
// If we're in a transaction, we need to flush now so that we can correctly detect
// duplicates if there are multiple ValueSets in the same TX with the same URL
// (which is an error, but we need to catch it). It'd be better to catch this by
// inspecting the URLs in the bundle or something, since flushing hurts performance
// but it's not expected that loading valuesets is going to be a huge high frequency
// thing so it probably doesn't matter
myEntityManager.flush();
ValidateUtil.isTrueOrThrowInvalidRequest(theResourceTable != null, "No resource supplied");
if (isPlaceholder(theValueSet)) {
@ -2552,7 +2572,7 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
termValueSet.setName(theValueSet.hasName() ? theValueSet.getName() : null);
// Delete version being replaced
deleteValueSetForResource(theResourceTable);
Optional<TermValueSet> deletedTrmValueSet = deleteValueSetForResource(theResourceTable);
/*
* Do the upload.
@ -2560,11 +2580,17 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
String url = termValueSet.getUrl();
String version = termValueSet.getVersion();
Optional<TermValueSet> optionalExistingTermValueSetByUrl;
if (version != null) {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndVersion(url, version);
if (deletedTrmValueSet.isPresent()
&& Objects.equals(deletedTrmValueSet.get().getUrl(), url)
&& Objects.equals(deletedTrmValueSet.get().getVersion(), version)) {
// If we just deleted the valueset marker, we don't need to check if it exists
// in the database
optionalExistingTermValueSetByUrl = Optional.empty();
} else {
optionalExistingTermValueSetByUrl = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(url);
optionalExistingTermValueSetByUrl = getTermValueSet(version, url);
}
if (optionalExistingTermValueSetByUrl.isEmpty()) {
myTermValueSetDao.save(termValueSet);
@ -2602,6 +2628,16 @@ public class TermReadSvcImpl implements ITermReadSvc, IHasScheduledJobs {
}
}
/**
 * Looks up a pre-existing {@link TermValueSet} by canonical URL, using the
 * version-aware finder when a version was supplied and the null-version
 * finder otherwise.
 *
 * @param theVersion the ValueSet business version, or {@code null} for the versionless entry
 * @param theUrl     the canonical URL of the ValueSet to locate
 * @return the matching entity, or {@link Optional#empty()} if none exists
 */
private Optional<TermValueSet> getTermValueSet(String theVersion, String theUrl) {
	// Parameters renamed to the file-wide "theXxx" convention; the redundant
	// local holder was removed in favor of direct returns.
	if (theVersion != null) {
		return myTermValueSetDao.findTermValueSetByUrlAndVersion(theUrl, theVersion);
	}
	return myTermValueSetDao.findTermValueSetByUrlAndNullVersion(theUrl);
}
@Override
@Transactional
public IFhirResourceDaoCodeSystem.SubsumesResult subsumes(

View File

@ -35,11 +35,11 @@ import java.util.stream.Stream;
*/
public class QueryChunker<T> extends TaskChunker<T> {
public void chunk(Collection<T> theInput, Consumer<List<T>> theBatchConsumer) {
public static <T> void chunk(Collection<T> theInput, Consumer<List<T>> theBatchConsumer) {
chunk(theInput, SearchBuilder.getMaximumPageSize(), theBatchConsumer);
}
public Stream<List<T>> chunk(Stream<T> theStream) {
public static <T> Stream<List<T>> chunk(Stream<T> theStream) {
return chunk(theStream, SearchBuilder.getMaximumPageSize());
}
}

View File

@ -3,36 +3,39 @@ package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import jakarta.persistence.EntityManager;
import jakarta.persistence.Tuple;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaQuery;
import org.hibernate.sql.results.internal.TupleImpl;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Answers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@ -40,7 +43,6 @@ import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class IdHelperServiceTest {
@ -60,29 +62,33 @@ public class IdHelperServiceTest {
@Mock
private MemoryCacheService myMemoryCacheService;
@Mock
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private EntityManager myEntityManager;
@Mock
private PartitionSettings myPartitionSettings;
@BeforeEach
@Mock
private TypedQuery myTypedQuery;
@BeforeEach
void setUp() {
myHelperSvc.setDontCheckActiveTransactionForUnitTest(true);
// lenient because some tests require this setup, and others do not
lenient().doReturn(true).when(myStorageSettings).isDeleteEnabled();
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();
}
@Test
public void testResolveResourcePersistentIds() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();
//prepare params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
String resourceType = "Patient";
Long id = 123L;
List<String> ids = List.of(String.valueOf(id));
boolean theExcludeDeleted = false;
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();
//prepare results
Patient expectedPatient = new Patient();
@ -91,19 +97,21 @@ public class IdHelperServiceTest {
// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(true);
final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted));
final ResourceNotFoundException resourceNotFoundException = assertThrows(ResourceNotFoundException.class, () -> myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode));
assertEquals("HAPI-2001: Resource Patient/123 is not known", resourceNotFoundException.getMessage());
}
@Test
public void testResolveResourcePersistentIdsDeleteFalse() {
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY).when(myStorageSettings).getResourceClientIdStrategy();
//prepare Params
RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionIdAndName(1, "Partition-A");
Long id = 123L;
String resourceType = "Patient";
List<String> ids = List.of(String.valueOf(id));
String forcedId = "(all)/" + resourceType + "/" + id;
boolean theExcludeDeleted = false;
ResolveIdentityMode mode = ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled();
//prepare results
Patient expectedPatient = new Patient();
@ -112,7 +120,7 @@ public class IdHelperServiceTest {
// configure mock behaviour
when(myStorageSettings.isDeleteEnabled()).thenReturn(false);
Map<String, JpaPid> actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, theExcludeDeleted);
Map<String, JpaPid> actualIds = myHelperSvc.resolveResourcePersistentIds(requestPartitionId, resourceType, ids, mode);
//verifyResult
assertFalse(actualIds.isEmpty());
@ -120,20 +128,35 @@ public class IdHelperServiceTest {
}
private Root<ResourceTable> getMockedFrom() {
@SuppressWarnings("unchecked")
Path<Object> path = mock(Path.class);
@SuppressWarnings("unchecked")
Root<ResourceTable> from = mock(Root.class);
when(from.get(ArgumentMatchers.<String>any())).thenReturn(path);
return from;
}
private List<Tuple> getMockedTupleList(Long idNumber, String resourceType, String id) {
Tuple tuple = mock(Tuple.class);
when(tuple.get(eq(0), eq(Long.class))).thenReturn(idNumber);
when(tuple.get(eq(1), eq(String.class))).thenReturn(resourceType);
when(tuple.get(eq(2), eq(String.class))).thenReturn(id);
return List.of(tuple);
}
@Test
public void testResolveResourceIdentity_defaultFunctionality(){
lenient().doReturn(JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC).when(myStorageSettings).getResourceClientIdStrategy();
RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition");
String resourceType = "Patient";
String resourceForcedId = "AAA";
Object[] tuple = new Object[] {
1L,
"Patient",
"AAA",
new Date(),
null
};
when(myEntityManager.createQuery(any(CriteriaQuery.class))).thenReturn(myTypedQuery);
when(myTypedQuery.getResultList()).thenReturn(List.of(
new TupleImpl(null, tuple)
));
IResourceLookup<JpaPid> result = myHelperSvc.resolveResourceIdentity(partitionId, resourceType, resourceForcedId, ResolveIdentityMode.includeDeleted().noCacheUnlessDeletesDisabled());
assertEquals(tuple[0], result.getPersistentId().getId());
assertEquals(tuple[1], result.getResourceType());
assertEquals(tuple[3], result.getDeleted());
}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -72,7 +72,7 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {
protected static IIdType patient2Id = null;
// Using static variables including the flag below so that we can initalize the database and indexes once
// (all of the tests only read from the DB and indexes and so no need to re-initialze them for each test).
private static Calendar observationDate = new GregorianCalendar();
private static final Calendar observationDate = new GregorianCalendar();
protected final String observationCd0 = "code0";
protected final String observationCd1 = "code1";
protected final String observationCd2 = "code2";
@ -120,9 +120,10 @@ abstract public class BaseR4SearchLastN extends BaseJpaTest {
// enabled to also create extended lucene index during creation of test data
boolean hsearchSaved = myStorageSettings.isAdvancedHSearchIndexing();
myStorageSettings.setAdvancedHSearchIndexing(true);
myStorageSettings.setDeleteEnabled(false);
// Using a static flag to ensure that test data and elasticsearch index is only created once.
// Creating this data and the index is time consuming and as such want to avoid having to repeat for each test.
// Creating this data and the index is time-consuming and as such want to avoid having to repeat for each test.
// Normally would use a static @BeforeClass method for this purpose, but Autowired objects cannot be accessed in static methods.
Patient pt = new Patient();

View File

@ -120,9 +120,10 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {
ourLog.info("Queries:\n * " + String.join("\n * ", queries));
// 1 query to resolve the subject PIDs
// 3 queries to actually perform the search
// 1 query to lookup up Search from cache, and 2 chunked queries to retrieve resources by PID.
assertThat(queries).hasSize(6);
assertThat(queries).hasSize(7);
// The first chunked query should have a full complement of PIDs
StringBuilder firstQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
@ -130,7 +131,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {
firstQueryPattern.append(",'[0-9]+'");
}
firstQueryPattern.append("\\).*");
assertThat(queries.get(4)).matches(firstQueryPattern.toString());
assertThat(queries.get(5)).matches(firstQueryPattern.toString());
// the second chunked query should be padded with "-1".
StringBuilder secondQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
@ -141,7 +142,7 @@ public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {
secondQueryPattern.append(",'-1'");
}
secondQueryPattern.append("\\).*");
assertThat(queries.get(5)).matches(secondQueryPattern.toString());
assertThat(queries.get(6)).matches(secondQueryPattern.toString());
}

View File

@ -79,6 +79,10 @@ public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {
// Set chunk size to 50
SearchBuilder.setMaxPageSizeForTest(50);
// Run once to fill caches
toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null));
// Actually test
myCaptureQueriesListener.clear();
List<String> results = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(), null));
assertThat(results).hasSize(75);

View File

@ -277,8 +277,7 @@ public class ValueSetExpansionR4ElasticsearchIT extends BaseJpaTest implements I
myTermCodeSystemStorageSvc.storeNewCodeSystemVersion(codeSystem, codeSystemVersion,
new SystemRequestDetails(), Collections.singletonList(valueSet), Collections.emptyList());
// myTerminologyDeferredStorageSvc.saveAllDeferred();
await().atMost(10, SECONDS).until(() -> {
await().atMost(20, SECONDS).until(() -> {
myTerminologyDeferredStorageSvc.saveDeferred();
return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true);
});

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -53,7 +53,7 @@ import java.util.Date;
import java.util.List;
import java.util.Optional;
public class MdmLinkDaoSvc<P extends IResourcePersistentId, M extends IMdmLink<P>> {
public class MdmLinkDaoSvc<P extends IResourcePersistentId<?>, M extends IMdmLink<P>> {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.mdm.dao.MdmLinkDaoSvc;
import ca.uhn.fhir.mdm.api.IGoldenResourceMergerSvc;
import ca.uhn.fhir.mdm.api.IMdmLink;
@ -235,10 +236,11 @@ public class GoldenResourceMergerSvcImpl implements IGoldenResourceMergerSvc {
List<? extends IMdmLink> toLinks = myMdmLinkDaoSvc.findMdmLinksByGoldenResource(theToResource);
List<IMdmLink> toDelete = new ArrayList<>();
IResourcePersistentId goldenResourcePid = myIdHelperService.resolveResourcePersistentIds(
IResourcePersistentId goldenResourcePid = myIdHelperService.resolveResourceIdentityPid(
getPartitionIdForResource(theToResource),
theToResource.getIdElement().getResourceType(),
theToResource.getIdElement().getIdPart());
theToResource.getIdElement().getIdPart(),
ResolveIdentityMode.includeDeleted().cacheOk());
// reassign links:
// to <- from

View File

@ -44,7 +44,7 @@ import java.util.Set;
import java.util.stream.Collectors;
@Service
public class FindCandidateByExampleSvc<P extends IResourcePersistentId> extends BaseCandidateFinder {
public class FindCandidateByExampleSvc<P extends IResourcePersistentId<?>> extends BaseCandidateFinder {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();
@Autowired

View File

@ -7,7 +7,10 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.JpaResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmSettings;
import ca.uhn.fhir.mdm.api.MdmLinkSourceEnum;
@ -24,6 +27,7 @@ import ca.uhn.fhir.mdm.util.MdmResourceUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
@ -127,7 +131,7 @@ public class MdmSurvivorshipSvcImplTest {
List<IBaseResource> resources = new ArrayList<>();
List<MdmLinkJson> links = new ArrayList<>();
Map<String, IResourcePersistentId> sourceIdToPid = new HashMap<>();
Map<IIdType, IResourceLookup<JpaPid>> sourceIdToPid = new HashMap<>();
for (int i = 0; i < 10; i++) {
// we want our resources to be slightly different
Patient patient = new Patient();
@ -152,7 +156,7 @@ public class MdmSurvivorshipSvcImplTest {
link.setSourceId(patient.getId());
link.setGoldenResourceId(goldenPatient.getId());
links.add(link);
sourceIdToPid.put(patient.getId(), link.getSourcePid());
sourceIdToPid.put(patient.getIdElement(), new JpaResourceLookup("Patient", (Long) link.getSourcePid().getId(), null, new PartitionablePartitionId()));
}
IFhirResourceDao resourceDao = mock(IFhirResourceDao.class);
@ -176,7 +180,7 @@ public class MdmSurvivorshipSvcImplTest {
when(myMdmSettings.getMdmRules())
.thenReturn(new MdmRulesJson());
doReturn(sourceIdToPid).when(myIIdHelperService)
.resolveResourcePersistentIds(any(RequestPartitionId.class), anyString(), any(List.class));
.resolveResourceIdentities(any(RequestPartitionId.class), any(List.class), any());
// we will return a non-empty list to reduce mocking
when(myEIDHelper.getExternalEid(any()))
.thenReturn(Collections.singletonList(new CanonicalEID("example", "value", "use")));
@ -199,14 +203,14 @@ public class MdmSurvivorshipSvcImplTest {
verify(resourceDao)
.update(eq(goldenPatientRebuilt), any(RequestDetails.class));
ArgumentCaptor<List<String>> idsCaptor = ArgumentCaptor.forClass(List.class);
verify(myIIdHelperService).resolveResourcePersistentIds(any(RequestPartitionId.class), anyString(), idsCaptor.capture());
ArgumentCaptor<List<IIdType>> idsCaptor = ArgumentCaptor.forClass(List.class);
verify(myIIdHelperService).resolveResourceIdentities(any(RequestPartitionId.class), idsCaptor.capture(), any());
assertNotNull(idsCaptor.getValue());
assertFalse(idsCaptor.getValue().isEmpty());
for (String id : idsCaptor.getValue()) {
assertFalse(id.contains("/"));
assertFalse(id.contains("Patient"));
}
List<String> ids = idsCaptor.getValue().stream().map(IIdType::getValue).toList();
List<String> expectedIds = resources.stream().map(t -> t.getIdElement().toUnqualifiedVersionless().getValue()).toList();
assertThat(ids).asList().containsExactlyInAnyOrder(expectedIds.toArray());
}
private MdmTransactionContext createTransactionContext() {

View File

@ -103,7 +103,7 @@ class MdmClearStepTest extends BaseMdmR4Test {
assertPatientExists(myGoldenId);
fail("Resource cannot be found");
} catch (ResourceNotFoundException e) {
assertEquals("HAPI-2001: Resource " + myGoldenId + " is not known", e.getMessage());
assertEquals("HAPI-1996: Resource " + myGoldenId + " is not known", e.getMessage());
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -334,6 +334,8 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
}
public void setTransientForcedId(String theTransientForcedId) {
assert theTransientForcedId == null || !theTransientForcedId.contains("/")
: "Invalid FHIR ID: " + theTransientForcedId;
myTransientForcedId = theTransientForcedId;
}
}

View File

@ -383,9 +383,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@Transient
private transient ResourceHistoryTable myCurrentVersionEntity;
@Transient
private transient ResourceHistoryTable myNewVersionEntity;
@Transient
private transient boolean myVersionUpdatedInCurrentTransaction;
@ -836,16 +833,8 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
* multiple {@link ResourceHistoryTable} entities will result in a constraint error.
*/
public ResourceHistoryTable toHistory(boolean theCreateVersionTags) {
boolean createVersionTags = theCreateVersionTags;
ResourceHistoryTable retVal = myNewVersionEntity;
if (retVal == null) {
retVal = new ResourceHistoryTable();
myNewVersionEntity = retVal;
} else {
// Tags should already be set
createVersionTags = false;
}
ResourceHistoryTable retVal = new ResourceHistoryTable();
retVal.setResourceId(myId);
retVal.setResourceType(myResourceType);
@ -855,12 +844,20 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
retVal.setPartitionId(getPartitionId());
retVal.setHasTags(isHasTags());
if (isHasTags() && createVersionTags) {
if (isHasTags() && theCreateVersionTags) {
for (ResourceTag next : getTags()) {
retVal.addTag(next);
}
}
// If we've deleted and updated the same resource in the same transaction,
// we need to actually create 2 distinct versions
if (getCurrentVersionEntity() != null
&& getCurrentVersionEntity().getId() != null
&& getVersion() == getCurrentVersionEntity().getVersion()) {
myVersion++;
}
populateHistoryEntityVersionAndDates(retVal);
return retVal;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,10 +1,5 @@
package ca.uhn.fhir.jpa.dao.dstu2;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
@ -84,6 +79,8 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.IOException;
import java.util.ArrayList;
@ -95,7 +92,11 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@ -439,37 +440,20 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
}
@Test
public void testCreateWithIllegalReference() {
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testCreateWithIllegalReference_WrongTypeForId(boolean theClientAssignedId) {
IIdType id1;
Observation o1 = new Observation();
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
try {
Patient p = new Patient();
p.getManagingOrganization().setReference(id1);
myPatientDao.create(p, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage());
if (theClientAssignedId) {
o1.setId("A");
id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless();
} else {
id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReference(new IdDt("Organization", id1.getIdPart()));
myPatientDao.create(p, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage());
}
// Now with a forced ID
o1 = new Observation();
o1.setId("testCreateWithIllegalReference");
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless();
try {
Patient p = new Patient();
p.getManagingOrganization().setReference(id1);
@ -485,9 +469,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
myPatientDao.create(p, mySrd);
fail("");
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage());
assertEquals(Msg.code(1094) + "Resource Organization/" + id1.getIdPart() + " not found, specified in path: Patient.managingOrganization", e.getMessage());
}
}
@Test
@ -677,8 +660,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
/**
* See #773
*/
* See #773
*/
@Test
public void testDeleteResourceWithOutboundDeletedResources() {
myStorageSettings.setEnforceReferentialIntegrityOnDelete(false);
@ -1387,8 +1370,8 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
/**
* See #196
*/
* See #196
*/
@Test
public void testInvalidChainNames() {
ReferenceParam param = null;
@ -1678,46 +1661,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
}
@Test
public void testReadInvalidVersion() throws Exception {
String methodName = "testReadInvalidVersion";
Patient pat = new Patient();
pat.addIdentifier().setSystem("urn:system").setValue(methodName);
IIdType id = myPatientDao.create(pat, mySrd).getId();
assertEquals(methodName, myPatientDao.read(id, mySrd).getIdentifier().get(0).getValue());
try {
myPatientDao.read(id.withVersion("0"), mySrd);
fail("");
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(979) + "Version \"0\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage());
}
try {
myPatientDao.read(id.withVersion("2"), mySrd);
fail("");
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(979) + "Version \"2\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage());
}
try {
myPatientDao.read(id.withVersion("H"), mySrd);
fail("");
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(978) + "Version \"H\" is not valid for resource Patient/" + id.getIdPart(), e.getMessage());
}
try {
myPatientDao.read(new IdDt("Patient/9999999999999/_history/1"), mySrd);
fail("");
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage());
}
}
@Test
public void testReadWithDeletedResource() {
String methodName = "testReadWithDeletedResource";

View File

@ -2574,25 +2574,25 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
@Test
public void testUpdateRejectsInvalidTypes() {
Patient p1 = new Patient();
p1.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes");
p1.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes");
IdDt p1id = (IdDt) myClient.create().resource(p1).execute().getId();
Patient patient = new Patient();
patient.addIdentifier().setSystem("urn:system").setValue("testUpdateRejectsInvalidTypes");
patient.addName().addFamily("Tester").addGiven("testUpdateRejectsInvalidTypes");
IdDt p1id = (IdDt) myClient.create().resource(patient).execute().getId();
Organization p2 = new Organization();
p2.getNameElement().setValue("testUpdateRejectsInvalidTypes");
Organization org = new Organization();
org.getNameElement().setValue("testUpdateRejectsInvalidTypes");
try {
myClient.update().resource(p2).withId("Organization/" + p1id.getIdPart()).execute();
myClient.update().resource(org).withId("Organization/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
// good
assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]");
}
try {
myClient.update().resource(p2).withId("Patient/" + p1id.getIdPart()).execute();
myClient.update().resource(org).withId("Patient/" + p1id.getIdPart()).execute();
fail("");
} catch (UnprocessableEntityException e) {
// good
assertThat(e.getMessage()).contains("HAPI-0930: Existing resource ID[Patient/" + p1id.getIdPart() + "] is of type[Patient] - Cannot update with [Organization]");
}
}

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -798,57 +798,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
}
@Test
public void testCreateWithIllegalReference() {
Observation o1 = new Observation();
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(id1);
myPatientDao.create(p, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage());
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart()));
myPatientDao.create(p, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage());
}
// Now with a forced ID
o1 = new Observation();
o1.setId("testCreateWithIllegalReference");
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless();
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(id1);
myPatientDao.create(p, mySrd);
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage());
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart()));
myPatientDao.create(p, mySrd);
fail("");
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage());
}
}
@Test
public void testCreateWithInvalid() {
Observation o1 = new Observation();
@ -2345,7 +2294,7 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
myPatientDao.read(new IdType("Patient/9999999999999/_history/1"), mySrd);
fail("");
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage());
assertEquals(Msg.code(2001) + "Resource Patient/9999999999999 is not known", e.getMessage());
}
}

View File

@ -1024,6 +1024,8 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
ValueSet updatedValueSet_v1 = valueSet_v1;
updatedValueSet_v1.setName(valueSet_v1.getName().concat(" - MODIFIED"));
logAllValueSets();
String url = myClient.getServerBase().concat("/").concat(myExtensionalVsId_v1.getValueAsString());
Bundle bundle = new Bundle();
bundle.setType(Bundle.BundleType.TRANSACTION);
@ -1056,7 +1058,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
.getRequest()
.setMethod(Bundle.HTTPVerb.PUT)
.setUrl("ValueSet/" + updatedValueSet_v2.getIdElement().getIdPart());
ourLog.debug("Transaction Bundle:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
ourLog.info("Transaction Bundle:\n" + myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(bundle));
myClient.transaction().withBundle(bundle).execute();
updatedValueSet_v2 = myValueSetDao.read(myExtensionalVsId_v2);

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -71,6 +71,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.isNotNull;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.lenient;
@ -136,7 +137,7 @@ class BaseHapiFhirResourceDaoTest {
private ArgumentCaptor<SearchParameterMap> mySearchParameterMapCaptor;
// we won't inject this
private FhirContext myFhirContext = FhirContext.forR4Cached();
private final FhirContext myFhirContext = FhirContext.forR4Cached();
@InjectMocks
private TestResourceDao mySvc;
@ -236,16 +237,17 @@ class BaseHapiFhirResourceDaoTest {
// mock
when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead(
any(RequestDetails.class),
Mockito.anyString(),
anyString(),
any(IIdType.class)
)).thenReturn(partitionId);
when(myIdHelperService.resolveResourcePersistentIds(
when(myIdHelperService.resolveResourceIdentityPid(
any(RequestPartitionId.class),
Mockito.anyString(),
Mockito.anyString()
anyString(),
anyString(),
any()
)).thenReturn(jpaPid);
when(myEntityManager.find(
any(Class.class),
any(),
anyLong()
)).thenReturn(entity);
// we don't stub myConfig.getResourceClientIdStrategy()

View File

@ -1,213 +0,0 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class IdHelperServiceTest {
@Mock
private JpaStorageSettings myStorageSettings;
@Mock
private IResourceTableDao myResourceTableDao;
@Mock
private MemoryCacheService myMemoryCacheService;
@Mock
private PartitionSettings myPartitionSettings;
@InjectMocks
private IdHelperService myHelperService;
@BeforeEach
public void beforeEach() {
myHelperService.setDontCheckActiveTransactionForUnitTest(true);
}
@Test
public void resolveResourcePersistentIds_withValidPids_returnsMap() {
RequestPartitionId partitionId = RequestPartitionId.allPartitions();
String resourceType = Patient.class.getSimpleName();
List<String> patientIdsToResolve = new ArrayList<>();
patientIdsToResolve.add("123");
patientIdsToResolve.add("456");
// test
Map<String, JpaPid> idToPid = myHelperService.resolveResourcePersistentIds(partitionId,
resourceType,
patientIdsToResolve);
assertFalse(idToPid.isEmpty());
for (String pid : patientIdsToResolve) {
assertThat(idToPid).containsKey(pid);
}
}
@Test
public void resolveResourcePersistentIds_withForcedIdsAndDeleteEnabled_returnsMap() {
RequestPartitionId partitionId = RequestPartitionId.allPartitions();
String resourceType = Patient.class.getSimpleName();
List<String> patientIdsToResolve = new ArrayList<>();
patientIdsToResolve.add("RED");
patientIdsToResolve.add("BLUE");
Object[] redView = new Object[] {
"Patient",
123l,
"RED",
new Date(),
null,
null
};
Object[] blueView = new Object[] {
"Patient",
456l,
"BLUE",
new Date(),
null,
null
};
// when
when(myStorageSettings.isDeleteEnabled())
.thenReturn(true);
when(myResourceTableDao.findAndResolveByForcedIdWithNoType(Mockito.anyString(),
Mockito.anyList(), Mockito.anyBoolean()))
.thenReturn(Collections.singletonList(redView))
.thenReturn(Collections.singletonList(blueView));
// test
Map<String, JpaPid> map = myHelperService.resolveResourcePersistentIds(
partitionId,
resourceType,
patientIdsToResolve);
assertFalse(map.isEmpty());
for (String id : patientIdsToResolve) {
assertThat(map).containsKey(id);
}
}
@Test
public void resolveResourcePersistenIds_withForcedIdAndDeleteDisabled_returnsMap() {
RequestPartitionId partitionId = RequestPartitionId.allPartitions();
String resourceType = Patient.class.getSimpleName();
List<String> patientIdsToResolve = new ArrayList<>();
patientIdsToResolve.add("RED");
patientIdsToResolve.add("BLUE");
JpaPid red = JpaPid.fromIdAndVersion(123L, 123L);
JpaPid blue = JpaPid.fromIdAndVersion(456L, 456L);
// we will pretend the lookup value is in the cache
when(myMemoryCacheService.getThenPutAfterCommit(any(MemoryCacheService.CacheEnum.class),
Mockito.anyString(),
any(Function.class)))
.thenReturn(red)
.thenReturn(blue);
// test
Map<String, JpaPid> map = myHelperService.resolveResourcePersistentIds(
partitionId,
resourceType,
patientIdsToResolve
);
assertFalse(map.isEmpty());
for (String id : patientIdsToResolve) {
assertThat(map).containsKey(id);
}
assertThat(map).containsEntry("RED", red);
assertThat(map).containsEntry("BLUE", blue);
}
@Test
public void testResolveResourceIdentity_defaultFunctionality(){
RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition");
String resourceType = "Patient";
String resourceForcedId = "AAA";
Object[] forcedIdView = new Object[6];
forcedIdView[0] = resourceType;
forcedIdView[1] = 1L;
forcedIdView[2] = resourceForcedId;
forcedIdView[3] = null;
forcedIdView[4] = null;
forcedIdView[5] = null;
Collection<Object[]> testForcedIdViews = new ArrayList<>();
testForcedIdViews.add(forcedIdView);
when(myResourceTableDao.findAndResolveByForcedIdWithNoTypeInPartition(any(), any(), any(), anyBoolean())).thenReturn(testForcedIdViews);
IResourceLookup<JpaPid> result = myHelperService.resolveResourceIdentity(partitionId, resourceType, resourceForcedId);
assertEquals(forcedIdView[0], result.getResourceType());
assertEquals(forcedIdView[1], result.getPersistentId().getId());
assertEquals(forcedIdView[3], result.getDeleted());
}
@Test
public void testResolveResourcePersistentIds_mapDefaultFunctionality(){
RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition");
String resourceType = "Patient";
List<String> ids = Arrays.asList("A", "B", "C");
JpaPid resourcePersistentId1 = JpaPid.fromId(1L);
JpaPid resourcePersistentId2 = JpaPid.fromId(2L);
JpaPid resourcePersistentId3 = JpaPid.fromId(3L);
when(myMemoryCacheService.getThenPutAfterCommit(any(), any(), any()))
.thenReturn(resourcePersistentId1)
.thenReturn(resourcePersistentId2)
.thenReturn(resourcePersistentId3);
Map<String, JpaPid> result = myHelperService.resolveResourcePersistentIds(partitionId, resourceType, ids)
.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue()));
assertThat(result.keySet()).hasSize(3);
assertEquals(1L, result.get("A").getId());
assertEquals(2L, result.get("B").getId());
assertEquals(3L, result.get("C").getId());
}
@Test
public void testResolveResourcePersistentIds_resourcePidDefaultFunctionality(){
RequestPartitionId partitionId = RequestPartitionId.fromPartitionIdAndName(1, "partition");
String resourceType = "Patient";
Long id = 1L;
JpaPid jpaPid1 = JpaPid.fromId(id);
when(myStorageSettings.getResourceClientIdStrategy()).thenReturn(JpaStorageSettings.ClientIdStrategyEnum.ANY);
when(myMemoryCacheService.getThenPutAfterCommit(any(), any(), any())).thenReturn(jpaPid1);
JpaPid result = myHelperService.resolveResourcePersistentIds(partitionId, resourceType, id.toString());
assertEquals(id, result.getId());
}
}

View File

@ -14,6 +14,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.HapiExtensions;
import com.helger.commons.lang.StackTraceHelper;
@ -52,6 +53,9 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
private boolean myHaveDroppedForcedIdUniqueConstraint;
@Autowired
private IPartitionLookupSvc myPartitionConfigSvc;
@Autowired
private SearchParamValidatingInterceptor mySearchParamValidatingInterceptor;
private boolean myRegisteredSearchParamValidatingInterceptor;
@AfterEach
public void after() {
@ -68,6 +72,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(new JpaStorageSettings().isAutoCreatePlaceholderReferenceTargets());
myStorageSettings.setMassIngestionMode(new JpaStorageSettings().isMassIngestionMode());
myStorageSettings.setMatchUrlCacheEnabled(new JpaStorageSettings().getMatchUrlCache());
if (myRegisteredSearchParamValidatingInterceptor) {
myInterceptorRegistry.unregisterInterceptor(mySearchParamValidatingInterceptor);
}
}
protected void assertNoRemainingPartitionIds() {
@ -112,6 +120,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
myPartitionConfigSvc.getPartitionById(i);
}
if (myInterceptorRegistry.getAllRegisteredInterceptors().stream().noneMatch(t->t instanceof SearchParamValidatingInterceptor)) {
myRegisteredSearchParamValidatingInterceptor = true;
myInterceptorRegistry.registerInterceptor(mySearchParamValidatingInterceptor);
}
}
protected void registerPartitionInterceptor() {

View File

@ -8,9 +8,11 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import jakarta.persistence.Id;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
@ -36,6 +38,24 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
myStorageSettings.setDeleteEnabled(new JpaStorageSettings().isDeleteEnabled());
}
@Test
public void testPreventUnDeleteIfDeletesDisabled() {
// Setup
createPatient(withId("A"), withActiveTrue());
myPatientDao.delete(new IdType("Patient/A"), mySrd);
// Test
myStorageSettings.setDeleteEnabled(false);
try {
createPatient(withId("A"), withActiveFalse());
fail();
} catch (InvalidRequestException e) {
// Verify
assertThat(e.getMessage()).contains("HAPI-2573: Unable to restore previously deleted resource as deletes are disabled");
}
}
@Test
public void testDeleteMarksResourceAndVersionAsDeleted() {

View File

@ -7,8 +7,8 @@ import ca.uhn.fhir.batch2.api.VoidModel;
import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson;
import ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeStep;
import ca.uhn.fhir.batch2.jobs.reindex.v1.ReindexStepV1;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexJobParameters;
import ca.uhn.fhir.batch2.jobs.reindex.v2.ReindexStepV2;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.WorkChunk;
import ca.uhn.fhir.context.FhirContext;
@ -40,6 +40,7 @@ import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
@ -51,6 +52,7 @@ import ca.uhn.fhir.test.utilities.server.HashMapResourceProviderExtension;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import ca.uhn.fhir.util.BundleBuilder;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
@ -100,6 +102,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@ -149,7 +152,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
@Autowired
private ISubscriptionTriggeringSvc mySubscriptionTriggeringSvc;
@Autowired
private ReindexStepV1 myReindexStepV1;
private ReindexStepV2 myReindexStep;
@Autowired
private DeleteExpungeStep myDeleteExpungeStep;
@Autowired
@ -248,6 +251,70 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
}
/**
 * Pins the SQL query counts for a FHIR transaction bundle containing resources
 * with many resource links (ExplanationOfBenefit referencing Patient, Organization
 * and Coverage targets), mixing server-assigned numeric IDs and client-assigned
 * string IDs as reference targets.
 * <p>
 * The second, identical transaction is expected to need fewer SELECTs (1 vs 4) —
 * presumably because target-resolution results are cached after the first pass;
 * see the query-count assertions below.
 */
@Test
public void testTransactionWithManyResourceLinks() {
// Setup - 10 reference targets of each type
List<IIdType> patientIds = new ArrayList<>();
List<IIdType> orgIds = new ArrayList<>();
List<IIdType> coverageIds = new ArrayList<>();
AtomicInteger counter = new AtomicInteger(0);
for (int i = 0; i < 10; i++) {
// Server-assigned numeric ID
coverageIds.add(createResource("Coverage", withStatus("active")));
// Client-assigned string ID
patientIds.add(createPatient(withId(UUID.randomUUID().toString()), withActiveTrue()));
orgIds.add(createOrganization(withId(UUID.randomUUID().toString()), withName("FOO")));
}
// Each EOB consumes the next target triple and carries one of 3 rotating tags
Supplier<ExplanationOfBenefit> creator = () -> {
int nextCount = counter.getAndIncrement();
assert nextCount < coverageIds.size();
ExplanationOfBenefit retVal = new ExplanationOfBenefit();
retVal.getMeta().addTag().setSystem("http://foo").setCode(Integer.toString(nextCount % 3));
retVal.setId(UUID.randomUUID().toString());
retVal.setPatient(new Reference(patientIds.get(nextCount)));
retVal.setInsurer(new Reference(orgIds.get(nextCount)));
retVal.addInsurance().setCoverage(new Reference(coverageIds.get(nextCount)));
return retVal;
};
// Transaction bundle of 5 PUT (update) entries, each a fresh EOB
Supplier<Bundle> transactionCreator = () -> {
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionUpdateEntry(creator.get());
bb.addTransactionUpdateEntry(creator.get());
bb.addTransactionUpdateEntry(creator.get());
bb.addTransactionUpdateEntry(creator.get());
bb.addTransactionUpdateEntry(creator.get());
return bb.getBundleTyped();
};
// Test - first transaction
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, transactionCreator.get());
myCaptureQueriesListener.logInsertQueries();
myCaptureQueriesListener.logSelectQueries();
// Verify - no repeated inserts, and the exact statement counts are pinned
assertEquals(0, myCaptureQueriesListener.countInsertQueriesRepeated());
assertEquals(33, myCaptureQueriesListener.countInsertQueries());
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
// Test again - second identical transaction (fresh EOBs, same targets)
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, transactionCreator.get());
// Verify again - SELECTs drop from 4 to 1 on the warm pass
assertEquals(0, myCaptureQueriesListener.countInsertQueriesRepeated());
assertEquals(30, myCaptureQueriesListener.countInsertQueries());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
}
/*
* See the class javadoc before changing the counts in this test!
*/
@ -331,7 +398,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
group = updateGroup(group, patientList.subList(initialPatientsCount, allPatientsCount));
assertQueryCount(10, 1, 2, 0);
assertQueryCount(9, 1, 2, 0);
assertThat(group.getMember()).hasSize(allPatientsCount);
@ -356,7 +423,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
group = updateGroup(group, Collections.emptyList());
myCaptureQueriesListener.logSelectQueries();
assertQueryCount(5, 1, 2, 0);
assertQueryCount(4, 1, 2, 0);
assertThat(group.getMember()).hasSize(30);
@ -938,7 +1005,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
patient.setActive(true);
myCaptureQueriesListener.clear();
myPatientDao.update(patient);
myPatientDao.update(patient, mySrd);
/*
* Add a resource with a forced ID target link
@ -947,7 +1014,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
Observation observation = new Observation();
observation.getSubject().setReference("Patient/P");
myObservationDao.create(observation);
myObservationDao.create(observation, mySrd);
myCaptureQueriesListener.logAllQueriesForCurrentThread();
// select: lookup forced ID
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
@ -998,14 +1065,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Observation observation = new Observation();
observation.getSubject().setReference("Patient/P");
myObservationDao.create(observation);
myCaptureQueriesListener.logAllQueriesForCurrentThread();
// select: lookup forced ID
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertNoPartitionSelectors();
myCaptureQueriesListener.logSelectQueries();
assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
// insert to: HFJ_RESOURCE, HFJ_RES_VER, HFJ_RES_LINK
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertNoPartitionSelectors();
/*
* Add another
@ -1064,7 +1130,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
instance,
mock(WorkChunk.class)
);
RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class));
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class));
// validate
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(theExpectedSelectCount);
@ -1109,7 +1175,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
instance,
mock(WorkChunk.class)
);
RunOutcome outcome = myReindexStepV1.run(stepExecutionDetails, mock(IJobDataSink.class));
RunOutcome outcome = myReindexStep.run(stepExecutionDetails, mock(IJobDataSink.class));
assertEquals(20, outcome.getRecordsProcessed());
// validate
@ -1343,6 +1409,57 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
return ids;
}
/**
 * Pins the SELECT count for a synchronous Patient search on {@code _id} with a
 * mixed list of client-assigned and server-assigned IDs, and shows the effect of
 * the memory cache: 2 SELECTs when warm, 3 immediately after
 * {@code invalidateAllCaches()}, then back to 2 once the cache is repopulated.
 */
@Test
public void testSearchByMultipleIds() {
// Setup - 5 pairs of patients: one client-assigned UUID ID, one server-assigned ID
List<String> idValues = new ArrayList<>();
for (int i = 0; i < 5; i++) {
// Client assigned and server assigned IDs
idValues.add(createPatient(withId(UUID.randomUUID().toString()), withActiveTrue()).toUnqualifiedVersionless().getValue());
idValues.add(createPatient(withActiveTrue()).toUnqualifiedVersionless().getValue());
}
String[] idValueArray = idValues.toArray(new String[0]);
// Test - _id search with all 10 IDs in a single OR list
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myPatientDao.search(map, mySrd);
List<String> values = toUnqualifiedVersionlessIdValues(outcome);
// Verify - warm path: 2 SELECTs, all patients found
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(values).asList().containsExactlyInAnyOrder(idValueArray);
// Now invalidate the caches, should add one more query
myMemoryCacheService.invalidateAllCaches();
map = SearchParameterMap.newSynchronous();
map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray));
myCaptureQueriesListener.clear();
outcome = myPatientDao.search(map, mySrd);
values = toUnqualifiedVersionlessIdValues(outcome);
// Verify - cold path costs exactly one extra SELECT
myCaptureQueriesListener.logSelectQueries();
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
assertThat(values).asList().containsExactlyInAnyOrder(idValueArray);
// And again, should be cached once more
map = SearchParameterMap.newSynchronous();
map.add(IAnyResource.SP_RES_ID, new TokenOrListParam(null, idValueArray));
myCaptureQueriesListener.clear();
outcome = myPatientDao.search(map, mySrd);
values = toUnqualifiedVersionlessIdValues(outcome);
// Verify - back to the warm-path count
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertThat(values).asList().containsExactlyInAnyOrder(idValueArray);
}
/**
* See the class javadoc before changing the counts in this test!
@ -1582,12 +1699,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
IBundleProvider outcome = dao.search(map, mySrd);
toUnqualifiedVersionlessIdValues(outcome);
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logSelectQueries();
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.clear();
outcome = dao.search(map, mySrd);
toUnqualifiedVersionlessIdValues(outcome);
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
}
@ -1869,6 +1987,62 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
runInTransaction(() -> assertEquals(2, myResourceTableDao.count()));
}
/**
 * Make sure that even if we're using versioned references, we still take
 * advantage of query pre-fetching.
 * <p>
 * The transaction PUTs resources addressed by fully-qualified, versioned URLs
 * ({@code http://localhost/...}, with that base registered as local and version
 * stripping disabled), then pins the exact select/update/insert/delete/commit
 * counts and checks that the stored Observation keeps relative, versioned
 * references to its targets.
 */
@Test
public void testTransactionPreFetchFullyQualifiedVersionedIds() {
// Setup - treat http://localhost as a local base and keep version suffixes on references
myStorageSettings.getTreatBaseUrlsAsLocal().add("http://localhost");
myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
createPatient(withId("P0"), withActiveTrue());
createPatient(withId("P1"), withActiveTrue());
createEncounter(withId("E0"), withSubject("Patient/P0"), withStatus("planned"));
createObservation(withId("O0"), withSubject("Patient/P0"));
Bundle input = new Bundle();
input.setType(Bundle.BundleType.TRANSACTION);
// Entry 1: update P1 via a versioned, fully-qualified URL
Patient patient = new Patient();
patient.setId("Patient/P1");
patient.setActive(false);
input.addEntry()
.setResource(patient)
.setFullUrl("http://localhost/Patient/P1/_history/1")
.getRequest()
.setUrl("Patient/P1/_history/1")
.setMethod(Bundle.HTTPVerb.PUT);
// Entry 2: update O0 with versioned, fully-qualified references to P0 and E0
Observation observation = new Observation();
observation.setId("Observation/O0");
observation.setSubject(new Reference("http://localhost/Patient/P0/_history/1"));
observation.setEncounter(new Reference("http://localhost/Encounter/E0/_history/1"));
input.addEntry()
.setResource(observation)
.setFullUrl("http://localhost/Observation/O0/_history/1")
.getRequest()
.setUrl("Observation/O0/_history/1")
.setMethod(Bundle.HTTPVerb.PUT);
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
// Verify - pinned statement counts; a single commit for the whole transaction
myCaptureQueriesListener.logSelectQueries();
assertEquals(4, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countUpdateQueries());
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
assertEquals(1, myCaptureQueriesListener.countCommits());
// The persisted references are stored relative but retain their version suffix
observation = myObservationDao.read(new IdType("Observation/O0"), mySrd);
ourLog.info("Observation:{}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(observation));
assertEquals("Patient/P0/_history/1", observation.getSubject().getReference());
assertEquals("Encounter/E0/_history/1", observation.getEncounter().getReference());
}
/**
* See the class javadoc before changing the counts in this test!
*/
@ -2061,9 +2235,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false), "'6445233466262474106'"));
assertEquals(1, countMatches(myCaptureQueriesListener.getSelectQueries().get(1).getSql(true, false), "'LOC'"));
assertEquals(6, runInTransaction(() -> myResourceTableDao.count()));
// Second identical pass
@ -2114,7 +2287,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6, runInTransaction(() -> myResourceTableDao.count()));
// Second identical pass
@ -2172,7 +2345,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(7, runInTransaction(() -> myResourceTableDao.count()));
// Second identical pass
@ -2227,7 +2400,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(7, runInTransaction(() -> myResourceTableDao.count()));
// Second identical pass
@ -2649,7 +2822,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -2677,7 +2850,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -2721,7 +2894,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -2749,7 +2922,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -2795,7 +2968,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(10, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -3068,8 +3241,13 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Bundle input = loadResource(myFhirContext, Bundle.class, "/r4/test-patient-bundle.json");
myCaptureQueriesListener.clear();
Bundle output = mySystemDao.transaction(mySrd, input);
myCaptureQueriesListener.logSelectQueries();
Bundle output;
try {
output = mySystemDao.transaction(mySrd, input);
} finally {
myCaptureQueriesListener.logSelectQueries();
myCaptureQueriesListener.logInsertQueries();
}
assertEquals(17, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6189, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
@ -3113,6 +3291,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
Bundle output = mySystemDao.transaction(mySrd, input);
// Verify
myCaptureQueriesListener.logUpdateQueries();
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
@ -3127,6 +3306,15 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(1, myResourceHistoryTableDao.count());
});
IdType id = new IdType(output.getEntry().get(0).getResponse().getLocation());
assertEquals("1", id.getVersionIdPart());
id = new IdType(output.getEntry().get(1).getResponse().getLocation());
assertEquals("2", id.getVersionIdPart());
Patient p = myPatientDao.read(id.toVersionless(), mySrd);
assertEquals("2", p.getIdElement().getVersionIdPart());
assertEquals("http://system", p.getIdentifierFirstRep().getSystem());
assertTrue(p.getActive());
}
/**
@ -3164,7 +3352,8 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
mySystemDao.transaction(mySrd, inputBundle);
assertEquals(21, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(78, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueries();
assertEquals(7, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
// Now run the transaction again - It should not need too many SELECTs
@ -3201,6 +3390,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
inputBundle = myReindexTestHelper.createTransactionBundleWith20Observation(true);
mySystemDao.transaction(mySrd, inputBundle);
myCaptureQueriesListener.logSelectQueries();
assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
assertEquals(0, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
@ -3212,7 +3402,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
/**
* See the class javadoc before changing the counts in this test!
*/
@SuppressWarnings("unchecked")
@Test
public void testTriggerSubscription_Sync() throws Exception {
// Setup
@ -3505,6 +3694,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
Bundle outcome = mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
myCaptureQueriesListener.logSelectQueries();
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueries();
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());

View File

@ -97,9 +97,9 @@ public class FhirResourceDaoR4ReferentialIntegrityTest extends BaseJpaR4Test {
// When
myPatientDao.create(p);
fail();
} catch (UnprocessableEntityException e) {
} catch (InvalidRequestException e) {
// Then: identify that it is the wrong resource type, since ref integrity is enabled
assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + obsId.getIdPart(), e.getMessage());
assertEquals(Msg.code(1094) + "Resource Organization/" + obsId.getIdPart() + " not found, specified in path: Patient.managingOrganization", e.getMessage());
}
// Given: now disable referential integrity on write

View File

@ -177,7 +177,7 @@ public class FhirResourceDaoR4SearchIncludeTest extends BaseJpaR4Test {
}
if (!theReverse && theMatchAll) {
SqlQuery searchForCanonicalReferencesQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(3);
SqlQuery searchForCanonicalReferencesQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2);
// Make sure we have the right query - If this ever fails, maybe we have optimized the queries
// (or somehow made things worse) and the search for the canonical target is no longer the 4th
// SQL query

View File

@ -1731,65 +1731,6 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
}
/**
 * Asserts that a synchronous search by {@code _id} compiles to a minimal SQL
 * statement: a single SELECT with a direct {@code res_id} predicate, no joins
 * when {@code _id} is the only parameter, and exactly one join (on the token
 * hash column) when a standard search parameter is added.
 * NOTE(review): this method appears in a deletion hunk of the diff — it was
 * removed in the newer revision.
 */
@Test
public void testSearchByIdParam_QueryIsMinimal() {
// With only an _id parameter
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(PARAM_ID, new StringParam("DiagnosticReport/123"));
myCaptureQueriesListener.clear();
myDiagnosticReportDao.search(params).size();
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertThat(selectQueries).hasSize(1);
String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase();
ourLog.info("SQL Query:\n{}", sqlQuery);
// Single-table query: direct ID match, type filter, not-deleted filter, no joins
assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(0);
assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport'")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "res_deleted_at is null")).as(sqlQuery).isEqualTo(1);
}
// With an _id parameter and a standard search param
{
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
params.add(PARAM_ID, new StringParam("DiagnosticReport/123"));
params.add("code", new TokenParam("foo", "bar"));
myCaptureQueriesListener.clear();
myDiagnosticReportDao.search(params).size();
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertThat(selectQueries).hasSize(1);
String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase();
ourLog.info("SQL Query:\n{}", sqlQuery);
// One join to the token index (hash_sys_and_value); type/deleted filters are optional here
assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "hash_sys_and_value")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport")).as(sqlQuery).isEqualTo(0); // could be 0
assertThat(countMatches(sqlQuery, "res_deleted_at")).as(sqlQuery).isEqualTo(0); // could be 0
}
}
/**
 * Asserts that ANDing the same {@code _id} value twice still produces a single,
 * minimal SELECT (direct {@code res_id} match, no joins) — the duplicate
 * parameter must not expand the query.
 * NOTE(review): this method appears in a deletion hunk of the diff — it was
 * removed in the newer revision.
 */
@Test
public void testSearchByIdParamAndOtherSearchParam_QueryIsMinimal() {
SearchParameterMap params = new SearchParameterMap();
params.setLoadSynchronous(true);
// Same _id added twice, combined with AND semantics
params.add(PARAM_ID, new StringParam("DiagnosticReport/123"));
params.add(PARAM_ID, new StringParam("DiagnosticReport/123"));
myCaptureQueriesListener.clear();
myDiagnosticReportDao.search(params).size();
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertThat(selectQueries).hasSize(1);
String sqlQuery = selectQueries.get(0).getSql(true, true).toLowerCase();
ourLog.info("SQL Query:\n{}", sqlQuery);
// Still a single-table query with one ID predicate and no joins
assertThat(countMatches(sqlQuery, "res_id = '123'")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "join")).as(sqlQuery).isEqualTo(0);
assertThat(countMatches(sqlQuery, "res_type = 'diagnosticreport'")).as(sqlQuery).isEqualTo(1);
assertThat(countMatches(sqlQuery, "res_deleted_at is null")).as(sqlQuery).isEqualTo(1);
}
@Test
public void testSearchByIdParamAnd() {
IIdType id1;

View File

@ -95,7 +95,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
mySearchCoordinatorSvcImpl = ProxyUtil.getSingletonTarget(mySearchCoordinatorSvc, SearchCoordinatorSvcImpl.class);
mySearchCoordinatorSvcImpl.setLoadingThrottleForUnitTests(null);
mySearchCoordinatorSvcImpl.setSyncSizeForUnitTests(QueryParameterUtils.DEFAULT_SYNC_SIZE);
myCaptureQueriesListener.setCaptureQueryStackTrace(true);
// myCaptureQueriesListener.setCaptureQueryStackTrace(true);
myStorageSettings.setAdvancedHSearchIndexing(false);
}
@ -876,8 +876,11 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
String obs2id = myObservationDao.create(obs2).getId().getIdPart();
assertThat(obs2id).matches("^[0-9]+$");
// Search by ID where all IDs are forced IDs
// Search by ID where all IDs are forced IDs, and in two separate params
{
myMemoryCacheService.invalidateAllCaches();
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenParam("A"));
map.add("subject", new ReferenceParam("Patient/B"));
@ -885,20 +888,20 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertThat(outcome.getResources(0, 999)).hasSize(1);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.res_type='observation'")).as(selectQuery).isEqualTo(1);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.fhir_id in ('a')")).as(selectQuery).isEqualTo(1);
assertThat(selectQuery).contains("where rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B'");
selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select t1.res_id from hfj_resource t1")).as(selectQuery).isEqualTo(1);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_type='observation'")).as(selectQuery).isEqualTo(0);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t1.res_deleted_at is null")).as(selectQuery).isEqualTo(0);
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')");
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
}
// Search by ID where at least one ID is a numeric ID
{
myMemoryCacheService.invalidateAllCaches();
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenOrListParam(null, "A", obs2id));
myCaptureQueriesListener.clear();
@ -906,20 +909,19 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertEquals(2, outcome.size());
assertThat(outcome.getResources(0, 999)).hasSize(2);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0")).as(selectQuery).isEqualTo(1);
// Because we included a non-forced ID, we need to verify the type
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'")).as(selectQuery).isEqualTo(1);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null")).as(selectQuery).isEqualTo(1);
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A' or rt1_0.RES_ID='" + obs2id + "')");
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
}
// Delete the resource - There should only be one search performed because deleted resources will
// be filtered out in the query that resolves forced ids to persistent ids
// Delete the resource
myObservationDao.delete(new IdType("Observation/A"));
myObservationDao.delete(new IdType("Observation/" + obs2id));
// Search by ID where all IDs are forced IDs
{
myMemoryCacheService.invalidateAllCaches();
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenParam("A"));
myCaptureQueriesListener.clear();
@ -928,26 +930,9 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
assertThat(outcome.getResources(0, 999)).isEmpty();
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(myCaptureQueriesListener.getSelectQueriesForCurrentThread()).hasSize(1);
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.res_type='observation'")).as(selectQuery).isEqualTo(1);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "rt1_0.fhir_id in ('a')")).as(selectQuery).isEqualTo(1);
}
// Search by ID where at least one ID is a numeric ID
{
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add("_id", new TokenOrListParam(null, "A", obs2id));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(map, new SystemRequestDetails());
assertEquals(0, outcome.size());
assertThat(outcome.getResources(0, 999)).isEmpty();
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String selectQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "select t0.res_id from hfj_resource t0")).as(selectQuery).isEqualTo(1);
// Because we included a non-forced ID, we need to verify the type
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_type = 'observation'")).as(selectQuery).isEqualTo(1);
assertThat(StringUtils.countMatches(selectQuery.toLowerCase(), "t0.res_deleted_at is null")).as(selectQuery).isEqualTo(1);
assertThat(selectQuery).contains("where (rt1_0.RES_TYPE='Observation' and rt1_0.FHIR_ID='A')");
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
}
}
@ -1248,7 +1233,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myPatientDao.update(p).getId().toUnqualifiedVersionless();
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.api.pid.StreamTemplate;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
@ -1090,16 +1091,28 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
runInTransaction(() -> {
Patient p = new Patient();
p.setId(id);
p.setId(id.getIdPart());
p.setActive(true);
p.addName().setFamily("FAMILY");
myPatientDao.update(p);
IIdType update1Id = myPatientDao.update(p).getId();
assertEquals("2", update1Id.getVersionIdPart());
p = new Patient();
p.setId(id);
p.setId(id.getIdPart());
p.setActive(false);
p.addName().setFamily("FAMILY2");
myPatientDao.update(p);
IIdType update2Id = myPatientDao.update(p).getId();
assertEquals("3", update2Id.getVersionIdPart());
assertEquals(update1Id.getIdPart(), update2Id.getIdPart());
});
runInTransaction(()->{
List<ResourceTable> resourceTables = myResourceTableDao.findAll();
assertEquals(1, resourceTables.size());
assertEquals(2, resourceTables.get(0).getVersion());
List<ResourceHistoryTable> versions = myResourceHistoryTableDao.findAllVersionsForResourceIdInOrder(resourceTables.get(0).getResourceId());
assertThat(versions.stream().map(ResourceHistoryTable::getVersion).toList()).asList().containsExactly(1L, 2L, 3L);
});
assertEquals(1, myPatientDao.search(SearchParameterMap.newSynchronous("name", new StringParam("family2"))).size());
@ -1193,62 +1206,29 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
}
@Test
public void testCreateWithIllegalReference() {
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testCreateWithIllegalReference_InvalidTypeForElement(boolean theClientAssignedId) {
Observation o1 = new Observation();
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
IIdType id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
IIdType id1;
if (theClientAssignedId) {
o1.setId("testCreateWithIllegalReference");
id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless();
} else {
id1 = myObservationDao.create(o1, mySrd).getId().toUnqualifiedVersionless();
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(id1);
p.getManagingOrganization().setReference(id1.getValue());
myPatientDao.create(p, mySrd);
fail();
fail("");
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage());
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart()));
myPatientDao.create(p, mySrd);
fail();
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(1095) + "Resource contains reference to unknown resource ID Organization/" + id1.getIdPart(), e.getMessage());
}
// Now with a forced ID
o1 = new Observation();
o1.setId("testCreateWithIllegalReference");
o1.getCode().addCoding().setSystem("foo").setCode("testChoiceParam01");
id1 = myObservationDao.update(o1, mySrd).getId().toUnqualifiedVersionless();
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(id1);
myPatientDao.create(p, mySrd);
fail();
} catch (UnprocessableEntityException e) {
assertEquals(Msg.code(931) + "Invalid reference found at path 'Patient.managingOrganization'. Resource type 'Observation' is not valid for this path", e.getMessage());
}
try {
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(new IdType("Organization", id1.getIdPart()));
myPatientDao.create(p, mySrd);
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(1094) + "Resource Organization/testCreateWithIllegalReference not found, specified in path: Patient.managingOrganization", e.getMessage());
}
// Disable validation
myStorageSettings.setEnforceReferenceTargetTypes(false);
Patient p = new Patient();
p.getManagingOrganization().setReferenceElement(id1);
myPatientDao.create(p, mySrd);
}
@Test
@ -2803,7 +2783,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
myPatientDao.read(new IdType("Patient/9999999999999/_history/1"), mySrd);
fail();
} catch (ResourceNotFoundException e) {
assertEquals(Msg.code(1996) + "Resource Patient/9999999999999/_history/1 is not known", e.getMessage());
assertEquals(Msg.code(2001) + "Resource Patient/9999999999999 is not known", e.getMessage());
}
}

View File

@ -32,6 +32,7 @@ import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.OperationOutcomeUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.validation.IValidatorModule;
import ca.uhn.test.util.LogbackTestExtension;
import ca.uhn.test.util.LogbackTestExtensionAssert;
import ch.qos.logback.classic.Level;
import org.apache.commons.io.IOUtils;
@ -85,6 +86,7 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.beans.factory.annotation.Autowired;
@ -113,6 +115,10 @@ import static org.mockito.Mockito.when;
public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4ValidateTest.class);
@RegisterExtension
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension();
@Autowired
private IValidatorModule myValidatorModule;
@Autowired

View File

@ -169,9 +169,10 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<SqlQuery> selectQueries = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertEquals(1, selectQueries.size());
// Look up the referenced subject/patient
String sql = selectQueries.get(0).getSql(true, false).toLowerCase();
assertThat(sql).contains(" from hfj_resource ");
assertEquals(2, StringUtils.countMatches(selectQueries.get(0).getSql(true, false).toLowerCase(), "partition"));
String searchSql = selectQueries.get(0).getSql(true, false);
assertThat(searchSql).contains(" from HFJ_RESOURCE ");
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
runInTransaction(() -> {
List<ResourceLink> resLinks = myResourceLinkDao.findAll();
@ -180,7 +181,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(obsId.getIdPartAsLong(), resLinks.get(0).getSourceResourcePid());
assertEquals(patientId.getIdPartAsLong(), resLinks.get(0).getTargetResourcePid());
assertEquals(myPartitionId, resLinks.get(0).getTargetResourcePartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, resLinks.get(0).getTargetResourcePartitionId().getPartitionDate());
});
}
@ -466,7 +466,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(myPartitionId, resourceLinks.get(0).getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, resourceLinks.get(0).getPartitionId().getPartitionDate());
assertEquals(myPartitionId, resourceLinks.get(0).getTargetResourcePartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, resourceLinks.get(0).getTargetResourcePartitionId().getPartitionDate());
// HFJ_RES_PARAM_PRESENT
List<SearchParamPresentEntity> presents = mySearchParamPresentDao.findAllForResource(resourceTable);
@ -876,7 +875,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IdType gotId1 = myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId1, gotId1);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
@ -888,7 +887,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IdType gotId2 = myPatientDao.read(patientId2, mySrd).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId2, gotId2);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
@ -925,7 +924,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless();
fail();
} catch (ResourceNotFoundException e) {
assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known");
assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known");
}
}
@ -936,7 +935,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.read(patientId2, mySrd).getIdElement().toUnqualifiedVersionless();
fail();
} catch (ResourceNotFoundException e) {
assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known");
assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known");
}
}
}
@ -948,6 +947,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
createPatient(withPartition(2), withActiveTrue());
IIdType patientId3 = createPatient(withPartition(3), withActiveTrue());
logAllResources();
// Two partitions - Found
{
myCaptureQueriesListener.clear();
@ -957,22 +958,30 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(patientId1, gotId1);
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
}
// Two partitions including default - Found
{
myCaptureQueriesListener.clear();
myPartitionInterceptor.addReadPartition(RequestPartitionId.fromPartitionNames(PARTITION_1, JpaConstants.DEFAULT_PARTITION_NAME));
IdType gotId1 = myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless();
IdType gotId1;
try {
gotId1 = myPatientDao.read(patientIdNull, mySrd).getIdElement().toUnqualifiedVersionless();
} finally {
myCaptureQueriesListener.logSelectQueries();
}
assertEquals(patientIdNull, gotId1);
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID from"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql);
assertEquals(3, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
}
// Two partitions - Not Found
@ -1010,10 +1019,15 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IdType gotId1 = myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId1, gotId1);
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String resolveIdentitySql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
assertEquals(1, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID from"), resolveIdentitySql);
assertEquals(1, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID in ("), resolveIdentitySql);
assertEquals(2, StringUtils.countMatches(resolveIdentitySql, "PARTITION_ID"), resolveIdentitySql);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"), searchSql);
}
// Two partitions including default - Found
@ -1025,7 +1039,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(patientIdNull, gotId1);
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql);
}
@ -1079,7 +1093,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.read(patientId1, mySrd).getIdElement().toUnqualifiedVersionless();
fail();
} catch (ResourceNotFoundException e) {
assertThat(e.getMessage()).matches(Msg.code(1996) + "Resource Patient/[0-9]+ is not known");
assertThat(e.getMessage()).matches(Msg.code(2001) + "Resource Patient/[0-9]+ is not known");
}
}
}
@ -1269,6 +1283,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId1, gotId1);
myCaptureQueriesListener.logSelectQueries();
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
ourLog.info("Search SQL:\n{}", searchSql);
@ -1321,6 +1336,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId1, gotId1);
myCaptureQueriesListener.logSelectQueries();
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
ourLog.info("Search SQL:\n{}", searchSql);
@ -1380,9 +1396,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
assertThat(searchSql).as(searchSql).contains("PARTITION_ID IN ('1')");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1);
assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1);
assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(2);
}
// Read in null Partition
@ -1430,20 +1445,13 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(1, searchOutcome.size());
IIdType gotId1 = searchOutcome.getResources(0, 1).get(0).getIdElement().toUnqualifiedVersionless();
assertEquals(patientId1, gotId1);
myCaptureQueriesListener.logSelectQueries();
// First SQL resolves the forced ID
// Performs the search
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false).toUpperCase();
ourLog.info("Search SQL:\n{}", searchSql);
assertThat(searchSql).as(searchSql).contains("PARTITION_ID IN ('1')");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1);
assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1);
// Second SQL performs the search
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false).toUpperCase();
ourLog.info("Search SQL:\n{}", searchSql);
assertThat(searchSql).as(searchSql).contains("FROM HFJ_RESOURCE ");
assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(3);
}
// Read in null Partition
@ -2096,9 +2104,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false);
ourLog.info("Search SQL:\n{}", searchSql);
assertThat(searchSql).as(searchSql).contains("PARTITION_ID in ('1')");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID,")).as(searchSql).isEqualTo(1);
assertThat(StringUtils.countMatches(searchSql, "PARTITION_DATE")).as(searchSql).isEqualTo(1);
assertThat(searchSql).as(searchSql).contains("PARTITION_ID = '1'");
assertThat(StringUtils.countMatches(searchSql, "PARTITION_ID")).as(searchSql).isEqualTo(2);
}

View File

@ -43,20 +43,25 @@ public class PatientCompartmentEnforcingInterceptorTest extends BaseResourceProv
myPartitionSettings.setDefaultPartitionId(ALTERNATE_DEFAULT_ID);
}
@Override
@AfterEach
public void after() {
public void after() throws Exception {
super.after();
myInterceptorRegistry.unregisterInterceptor(myPatientIdPartitionInterceptor);
myInterceptorRegistry.unregisterInterceptor(myForceOffsetSearchModeInterceptor);
myInterceptorRegistry.unregisterInterceptor(mySvc);
myPartitionSettings.setPartitioningEnabled(false);
myPartitionSettings.setUnnamedPartitionMode(new PartitionSettings().isUnnamedPartitionMode());
myPartitionSettings.setDefaultPartitionId(new PartitionSettings().getDefaultPartitionId());
PartitionSettings defaultPartitionSettings = new PartitionSettings();
myPartitionSettings.setUnnamedPartitionMode(defaultPartitionSettings.isUnnamedPartitionMode());
myPartitionSettings.setDefaultPartitionId(defaultPartitionSettings.getDefaultPartitionId());
myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartitionSettings.getAllowReferencesAcrossPartitions());
}
@Test
public void testUpdateResource_whenCrossingPatientCompartment_throws() {
myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
createPatientA();
createPatientB();

View File

@ -188,9 +188,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
Patient patient = myPatientDao.read(new IdType("Patient/A"), mySrd);
assertTrue(patient.getActive());
myCaptureQueriesListener.logSelectQueries();
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID in (?)");
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("where rt1_0.PARTITION_ID=? and rt1_0.RES_ID=?");
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID=?");
}
@Test
@ -223,9 +222,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<SqlQuery> selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertEquals(4, selectQueriesForCurrentThread.size());
assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID in (?)");
assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).contains("PARTITION_ID=");
assertEquals(3, selectQueriesForCurrentThread.size());
assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID=?");
}
@ -237,9 +235,8 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
IBundleProvider outcome = myPatientDao.search(SearchParameterMap.newSynchronous("_id", new TokenParam("A")), mySrd);
assertEquals(1, outcome.size());
myCaptureQueriesListener.logSelectQueries();
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("rt1_0.PARTITION_ID in (?)");
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("t0.PARTITION_ID = ?");
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID = ?");
}
@Test
@ -352,9 +349,9 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
IBundleProvider outcome = myOrganizationDao.history(new IdType("Organization/C"), null, null, null, mySrd);
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, outcome.size());
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(3);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID in ");
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("PARTITION_ID=");
assertThat(myCaptureQueriesListener.getSelectQueries()).hasSize(2);
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false)).contains("PARTITION_ID=?");
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false)).contains("PARTITION_ID in (?)");
}

View File

@ -137,6 +137,7 @@ public class ConsentInterceptorResourceProviderR4IT extends BaseResourceProvider
myInterceptorRegistry.unregisterInterceptor(myBinaryStorageInterceptor);
}
@Test
public void testSearchAndBlockSomeWithReject() {
create50Observations();
@ -960,7 +961,12 @@ public class ConsentInterceptorResourceProviderR4IT extends BaseResourceProvider
String fhirType = theResource.fhirType();
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(fhirType);
String currentTransactionName = TransactionSynchronizationManager.getCurrentTransactionName();
dao.read(theResource.getIdElement());
try {
dao.read(theResource.getIdElement());
} catch (ResourceNotFoundException e) {
// this is expected, the resource isn't saved in the DB yet. But it shouldn't
// be an NPE, only a ResourceNotFoundException
}
return ConsentOutcome.PROCEED;
}

View File

@ -531,7 +531,8 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
myPatientDao.update((Patient) patientA, requestDetails);
fail();
} catch (InvalidRequestException e) {
assertEquals(Msg.code(2079) + "Resource " + ((Patient) patientA).getResourceType() + "/" + ((Patient) patientA).getIdElement().getIdPart() + " is not known", e.getMessage());
String idPart = ((Patient) patientA).getIdElement().getIdPart();
assertThat(e.getMessage()).contains("HAPI-0960: Can not create resource with ID[" + idPart + "]");
}
}

View File

@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.BundleUtil;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
@ -39,7 +40,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test {
class SqlCapturingInterceptor {
static class SqlCapturingInterceptor {
SqlQueryList queryList = null;
@Hook(Pointcut.JPA_PERFTRACE_RAW_SQL)
public void trackRequest(RequestDetails theRequestDetails, SqlQueryList theQueryList) {
@ -89,7 +90,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test {
Bundle bundle = myClient.search()
.forResource(Patient.class)
.count(200)
.where(Patient.RES_ID.exactly().codes(pid.getIdPart()))
.where(IAnyResource.RES_ID.exactly().codes(pid.getIdPart()))
.revInclude(new Include("CareTeam:subject:Group").setRecurse(true))
// .revInclude(new Include("CareTeam:subject:Group"))
.revInclude(new Include("Group:member:Patient"))
@ -128,7 +129,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test {
}
break;
default:
ourLog.warn(type + " found but not expected");
ourLog.warn("{} found but not expected", type);
}
if (patientFound && groupFound && careTeamFound && detectedIssueFound) {
@ -144,7 +145,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test {
assertEquals(methodName, patient.getName().get(0).getFamily());
//Ensure that the revincludes are included in the query list of the sql trace.
//TODO GGG/KHS reduce this to something less than 6 by smarter iterating and getting the resource types earlier when needed.
//TODO GGG/KHS reduce this to something less than 5 by smarter iterating and getting the resource types earlier when needed.
assertThat(sqlCapturingInterceptor.getQueryList()).hasSize(5);
myInterceptorRegistry.unregisterInterceptor(sqlCapturingInterceptor);
}
@ -166,7 +167,7 @@ public class ResourceProviderRevIncludeTest extends BaseResourceProviderR4Test {
// DetectedIssue?_id=123&_include=DetectedIssue:author&_revinclude=PractitionerRole:practitioner
Bundle bundle = myClient.search()
.forResource(DetectedIssue.class)
.where(DetectedIssue.RES_ID.exactly().codes(detectedIssueId.getIdPart()))
.where(IAnyResource.RES_ID.exactly().codes(detectedIssueId.getIdPart()))
.include(new Include("DetectedIssue:author"))
.revInclude(new Include("PractitionerRole:practitioner").setRecurse(true))
.returnBundle(Bundle.class)

View File

@ -292,11 +292,11 @@ public class GiantTransactionPerfTest {
ourLog.info("Merges:\n * " + myEntityManager.myMergeCount.stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
assertThat(myEntityManager.myPersistCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactly("ResourceTable");
assertThat(myEntityManager.myPersistCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactly("ResourceTable", "ResourceHistoryTable");
assertThat(myEntityManager.myMergeCount.stream().map(t -> t.getClass().getSimpleName()).collect(Collectors.toList())).containsExactlyInAnyOrder("ResourceTable", "ResourceIndexedSearchParamToken", "ResourceIndexedSearchParamToken");
assertEquals(1, myEntityManager.myFlushCount);
assertEquals(1, myResourceVersionSvc.myGetVersionMap);
assertEquals(1, myResourceHistoryTableDao.mySaveCount);
assertEquals(0, myResourceHistoryTableDao.mySaveCount);
}
@Test
@ -632,12 +632,12 @@ public class GiantTransactionPerfTest {
@Override
public FlushModeType getFlushMode() {
throw new UnsupportedOperationException();
return FlushModeType.AUTO;
}
@Override
public void setFlushMode(FlushModeType flushMode) {
throw new UnsupportedOperationException();
// ignore
}
@Override

View File

@ -17,6 +17,7 @@ import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionMatcherInterc
import ca.uhn.fhir.jpa.subscription.submit.interceptor.SynchronousSubscriptionMatcherInterceptor;
import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.test.util.LogbackTestExtension;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import com.fasterxml.jackson.core.JsonProcessingException;
@ -28,6 +29,7 @@ import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -48,6 +50,9 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
public class AsyncSubscriptionMessageSubmissionIT extends BaseSubscriptionsR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(AsyncSubscriptionMessageSubmissionIT.class);
@RegisterExtension
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension(AsyncResourceModifiedSubmitterSvc.class);
@SpyBean
IResourceModifiedConsumer myResourceModifiedConsumer;

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -2,12 +2,20 @@ package ca.uhn.fhir.jpa.dao.r5;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermConceptMap;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.BundleBuilder;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.r5.model.BooleanType;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeSystem;
import org.hl7.fhir.r5.model.CodeType;
import org.hl7.fhir.r5.model.ConceptMap;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Parameters;
@ -15,6 +23,7 @@ import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Quantity;
import org.hl7.fhir.r5.model.Reference;
import org.hl7.fhir.r5.model.UriType;
import org.hl7.fhir.r5.model.ValueSet;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
@ -22,14 +31,17 @@ import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.when;
public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
@ -363,21 +375,21 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
myCaptureQueriesListener.logSelectQueries();
if (theTargetAlreadyExists) {
if (theResourceChanges) {
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
} else {
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(5, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
}
} else {
if (theResourceChanges) {
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
} else {
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
}
@ -488,14 +500,14 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertNull(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/2");
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3");
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
logAllResources();
logAllResourceVersions();
actual = myPatientDao.read(resourceId, mySrd);
assertEquals("2", actual.getIdElement().getVersionIdPart());
assertEquals("3", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
@ -505,11 +517,11 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertNull(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3");
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/5");
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
actual = myPatientDao.read(resourceId, mySrd);
assertEquals("3", actual.getIdElement().getVersionIdPart());
assertEquals("5", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
}
@ -525,7 +537,6 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
}
@Test
public void testConditionalDeleteAndConditionalUpdateOnSameResource_MultipleMatchesAlreadyExist() {
@ -558,7 +569,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
// First pass (resource doesn't already exist)
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext));
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-0"));
assertNull(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/1");
@ -569,23 +580,26 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
assertEquals("1", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
assertEquals("FAMILY-0", actual.getNameFirstRep().getFamily());
// Second pass (resource already exists)
myCaptureQueriesListener.clear();
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext));
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-1"));
myCaptureQueriesListener.logUpdateQueries();
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertNull(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/1");
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
resourceId = new IdType(outcome.getEntry().get(1).getResponse().getLocation()).toUnqualifiedVersionless();
actual = myPatientDao.read(resourceId, mySrd);
assertEquals("FAMILY-1", actual.getNameFirstRep().getFamily());
logAllResources();
logAllResourceVersions();
IdType resourceId2 = new IdType(outcome.getEntry().get(1).getResponse().getLocation()).toUnqualifiedVersionless();
assertThat(resourceId2.getIdPart()).isNotEqualTo(resourceId.getIdPart());
actual = myPatientDao.read(resourceId2, mySrd);
assertEquals("1", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
@ -593,7 +607,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
// Third pass (resource already exists)
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext));
outcome = mySystemDao.transaction(mySrd, createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(myFhirContext, "FAMILY-2"));
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertNull(outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
@ -606,6 +620,7 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
assertEquals("1", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
assertEquals("FAMILY-2", actual.getNameFirstRep().getFamily());
}
/**
@ -632,22 +647,32 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
logAllResources();
logAllResourceVersions();
// Second pass (resource already exists)
myCaptureQueriesListener.clear();
outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext));
try {
outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext));
} finally {
myCaptureQueriesListener.logInsertQueries();
}
myCaptureQueriesListener.logUpdateQueries();
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("Patient/P/_history/2", outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("Patient/P/_history/2", outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("Patient/P/_history/3", outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
logAllResources();
logAllResourceVersions();
actual = myPatientDao.read(resourceId, mySrd);
assertEquals("2", actual.getIdElement().getVersionIdPart());
assertThrows(ResourceGoneException.class, ()->myPatientDao.read(resourceId.withVersion("2"), mySrd));
actual = myPatientDao.read(resourceId.withVersion("3"), mySrd);
assertEquals("3", actual.getIdElement().getVersionIdPart());
actual = myPatientDao.read(resourceId.toVersionless(), mySrd);
assertEquals("3", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
@ -655,26 +680,47 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
outcome = mySystemDao.transaction(mySrd, createBundleWithDeleteAndUpdateOnSameResource(myFhirContext));
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertEquals("Patient/P/_history/3", outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("Patient/P/_history/4", outcome.getEntry().get(0).getResponse().getLocation());
assertEquals("204 No Content", outcome.getEntry().get(0).getResponse().getStatus());
assertEquals("Patient/P/_history/3", outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("Patient/P/_history/5", outcome.getEntry().get(1).getResponse().getLocation());
assertEquals("201 Created", outcome.getEntry().get(1).getResponse().getStatus());
actual = myPatientDao.read(resourceId, mySrd);
assertEquals("3", actual.getIdElement().getVersionIdPart());
actual = myPatientDao.read(resourceId.withVersion("5"), mySrd);
assertEquals("5", actual.getIdElement().getVersionIdPart());
assertEquals("http://foo", actual.getIdentifierFirstRep().getSystem());
assertEquals("http://tag", actual.getMeta().getTagFirstRep().getSystem());
}
@Test
public void testDeleteThenConditionalCreateOnTheSameResource() {
// Create a patient
myPatientDao.update(createPatientWithIdentifierAndTag().setActive(true), mySrd);
// Transaction which deletes that patient by ID and then recreates it conditionally
BundleBuilder bb = new BundleBuilder(myFhirContext);
bb.addTransactionDeleteEntry(new IdType("Patient/P"));
bb.addTransactionUpdateEntry(createPatientWithIdentifierAndTag().setActive(false)).conditional("Patient?identifier=http://foo|bar");
Bundle outcome = mySystemDao.transaction(mySrd, bb.getBundleTyped());
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
assertThat(outcome.getEntry().get(0).getResponse().getLocation()).endsWith("_history/2");
assertThat(outcome.getEntry().get(1).getResponse().getLocation()).endsWith("_history/3");
// Verify
Patient patient = myPatientDao.read(new IdType("Patient/P"), mySrd);
assertFalse(patient.getActive());
assertEquals("3", patient.getIdElement().getVersionIdPart());
}
/**
* See #5110
*/
@Test
public void testTransactionWithMissingSystem() {
BundleBuilder bb = new BundleBuilder(myFhirContext);
BundleBuilder bb = new BundleBuilder(myFhirContext);
Patient patient = new Patient();
patient.setId(IdType.newRandomUuid());
@ -685,13 +731,149 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
patient.addName().setText("Jane Doe");
bb.addTransactionCreateEntry(patient);
Bundle inputBundle = bb.getBundleTyped();
Bundle inputBundle = bb.getBundleTyped();
Bundle outputBundle = mySystemDao.transaction(mySrd, inputBundle);
assertThat(outputBundle.getEntry().get(0).getResponse().getLocation()).matches("Patient/[0-9]+/_history/1");
}
/**
 * Creating a CodeSystem via a transaction bundle must also populate the
 * dedicated terminology table rows for that resource type.
 */
@Test
public void testCreateCodeSystem() {
	BundleBuilder builder = new BundleBuilder(myFhirContext);
	builder.addTransactionCreateEntry(newDummyCodeSystem());

	// Test
	mySystemDao.transaction(mySrd, builder.getBundleTyped());

	// Verify: exactly one TermCodeSystem row was written
	runInTransaction(() -> {
		List<TermCodeSystem> codeSystems = myTermCodeSystemDao.findAll();
		assertThat(codeSystems).hasSize(1);
	});
}
/**
 * Creating a ConceptMap via a transaction bundle must also populate the
 * dedicated terminology table rows for that resource type.
 */
@Test
public void testCreateConceptMap() {
	BundleBuilder builder = new BundleBuilder(myFhirContext);
	builder.addTransactionCreateEntry(newDummyConceptMap());

	// Test
	mySystemDao.transaction(mySrd, builder.getBundleTyped());

	// Verify: exactly one TermConceptMap row was written
	runInTransaction(() -> {
		List<TermConceptMap> conceptMaps = myTermConceptMapDao.findAll();
		assertThat(conceptMaps).hasSize(1);
	});
}
/**
 * Creating a ValueSet via a transaction bundle must also populate the
 * dedicated terminology table rows for that resource type.
 */
@Test
public void testCreateValueSet() {
	BundleBuilder builder = new BundleBuilder(myFhirContext);
	builder.addTransactionCreateEntry(newDummyValueSet());

	// Test
	mySystemDao.transaction(mySrd, builder.getBundleTyped());

	// Verify: exactly one TermValueSet row was written
	runInTransaction(() -> {
		List<TermValueSet> valueSets = myTermValueSetDao.findAll();
		assertThat(valueSets).hasSize(1);
	});
}
/**
 * Two CodeSystem resources sharing the same URL inside a single
 * transaction bundle must be rejected gracefully with an
 * {@link UnprocessableEntityException}.
 */
@Test
public void testCreateDuplicateCodeSystem() {
	BundleBuilder bb = new BundleBuilder(myFhirContext);
	bb.addTransactionCreateEntry(newDummyCodeSystem());
	bb.addTransactionCreateEntry(newDummyCodeSystem());

	// Test - assertThrows replaces the try/fail()/catch pattern and yields a
	// clearer failure message if no exception (or the wrong one) is thrown
	UnprocessableEntityException e = assertThrows(
		UnprocessableEntityException.class,
		() -> mySystemDao.transaction(mySrd, bb.getBundleTyped()));

	// Verify
	assertThat(e.getMessage()).contains("Can not create multiple CodeSystem resources with CodeSystem.url");
}
/**
 * Two ValueSet resources sharing the same URL inside a single
 * transaction bundle must be rejected gracefully with an
 * {@link UnprocessableEntityException}.
 */
@Test
public void testCreateDuplicateValueSet() {
	BundleBuilder bb = new BundleBuilder(myFhirContext);
	bb.addTransactionCreateEntry(newDummyValueSet());
	bb.addTransactionCreateEntry(newDummyValueSet());

	// Test - assertThrows replaces the try/fail()/catch pattern and yields a
	// clearer failure message if no exception (or the wrong one) is thrown
	UnprocessableEntityException e = assertThrows(
		UnprocessableEntityException.class,
		() -> mySystemDao.transaction(mySrd, bb.getBundleTyped()));

	// Verify
	assertThat(e.getMessage()).contains("Can not create multiple ValueSet resources with ValueSet.url");
}
/**
 * Builds a minimal active CodeSystem fixture (url "http://foo",
 * version 1.2.3) containing a single concept.
 */
@Nonnull
private static CodeSystem newDummyCodeSystem() {
	CodeSystem codeSystem = new CodeSystem();
	codeSystem.setStatus(Enumerations.PublicationStatus.ACTIVE);
	codeSystem.setUrl("http://foo");
	codeSystem.setVersion("1.2.3");
	codeSystem.addConcept().setCode("HELLO");
	return codeSystem;
}
/**
 * Builds a minimal active ConceptMap fixture (url "http://foo",
 * version 1.2.3) with no mapping groups.
 */
@Nonnull
private static ConceptMap newDummyConceptMap() {
	ConceptMap conceptMap = new ConceptMap();
	conceptMap.setStatus(Enumerations.PublicationStatus.ACTIVE);
	conceptMap.setUrl("http://foo");
	conceptMap.setVersion("1.2.3");
	return conceptMap;
}
/**
 * Builds a minimal active ValueSet fixture (url "http://foo",
 * version 1.2.3) that includes the administrative-gender code system.
 */
@Nonnull
private static ValueSet newDummyValueSet() {
	ValueSet valueSet = new ValueSet();
	valueSet.setStatus(Enumerations.PublicationStatus.ACTIVE);
	valueSet.setUrl("http://foo");
	valueSet.setVersion("1.2.3");
	valueSet.getCompose().addInclude().setSystem("http://hl7.org/fhir/administrative-gender");
	return valueSet;
}
@Nonnull
private static Bundle createBundleWithConditionalDeleteAndConditionalUpdateOnSameResource(FhirContext theFhirContext) {
@ -705,13 +887,14 @@ public class FhirSystemDaoTransactionR5Test extends BaseJpaR5Test {
}
@Nonnull
private static Bundle createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(FhirContext theFhirContext) {
private static Bundle createBundleWithConditionalDeleteAndConditionalCreateOnSameResource(FhirContext theFhirContext, String theFamilyName) {
// Build a new bundle each time we need it
BundleBuilder bb = new BundleBuilder(theFhirContext);
bb.addTransactionDeleteConditionalEntry("Patient?identifier=http://foo|bar");
Patient patient = createPatientWithIdentifierAndTag();
patient.setId((String)null);
patient.setId((String) null);
patient.getNameFirstRep().setFamily(theFamilyName);
bb.addTransactionCreateEntry(patient).conditional("Patient?identifier=http://foo|bar");
return bb.getBundleTyped();
}

View File

@ -247,9 +247,9 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// Verify SQL
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
// 2- Resolve Practitioner/PR1 reference
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written
@ -291,9 +291,9 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// Verify SQL
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
// 1- Resolve Practitioner/PR1 reference
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written
@ -337,7 +337,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written
@ -382,7 +382,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written
@ -426,7 +426,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// Verify SQL
// 1- Resolve resource forced IDs, and 2- Resolve Practitioner/PR1 reference
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written
@ -476,7 +476,7 @@ public class UpliftedRefchainsAndChainedSortingR5Test extends BaseJpaR5Test {
// Verify SQL
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(10, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(9, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
// Verify correct indexes are written

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -566,9 +566,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@RegisterExtension
private final PreventDanglingInterceptorsExtension myPreventDanglingInterceptorsExtension = new PreventDanglingInterceptorsExtension(()-> myInterceptorRegistry);
@RegisterExtension
public LogbackTestExtension myLogbackTestExtension = new LogbackTestExtension();
@AfterEach()
@Order(0)
public void afterCleanupDao() {

View File

@ -20,7 +20,9 @@
package ca.uhn.fhir.jpa.test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.ParserOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.api.Pointcut;
@ -158,6 +160,7 @@ import java.util.stream.Stream;
import static ca.uhn.fhir.rest.api.Constants.HEADER_CACHE_CONTROL;
import static ca.uhn.fhir.util.TestUtil.doRandomizeLocaleAndTimezone;
import static java.util.stream.Collectors.joining;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@ -398,9 +401,13 @@ public abstract class BaseJpaTest extends BaseTest {
JpaStorageSettings defaultConfig = new JpaStorageSettings();
myStorageSettings.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing());
myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches());
myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled());
myStorageSettings.setIncludeHashIdentityForTokenSearches(defaultConfig.isIncludeHashIdentityForTokenSearches());
myStorageSettings.setMaximumIncludesToLoadPerPage(defaultConfig.getMaximumIncludesToLoadPerPage());
myStorageSettings.getTreatBaseUrlsAsLocal().clear();
ParserOptions defaultParserOptions = new ParserOptions();
myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences());
}
@AfterEach
@ -938,7 +945,10 @@ public abstract class BaseJpaTest extends BaseTest {
dao.read(theId, mySrd);
fail("");
} catch (ResourceNotFoundException e) {
// good
assertThat(e.getMessage()).containsAnyOf(
Msg.code(1996) + "Resource " + theId.toUnqualifiedVersionless().getValue() + " is not known",
Msg.code(2001) + "Resource " + theId.toUnqualifiedVersionless().getValue() + " is not known"
);
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>7.7.6-SNAPSHOT</version>
<version>7.7.7-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -24,7 +24,9 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.mdm.api.IMdmLinkQuerySvc;
import ca.uhn.fhir.mdm.api.IMdmSurvivorshipService;
import ca.uhn.fhir.mdm.api.MdmMatchResultEnum;
@ -40,6 +42,7 @@ import ca.uhn.fhir.util.TerserUtil;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.data.domain.Page;
import java.util.ArrayList;
@ -153,12 +156,15 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService {
Page<MdmLinkJson> linksQuery = myMdmLinkQuerySvc.queryLinks(searchParameters, theMdmTransactionContext);
// we want it ordered
List<String> sourceIds = new ArrayList<>();
List<IIdType> sourceIds = new ArrayList<>();
linksQuery.forEach(link -> {
String sourceId = link.getSourceId();
// we want only the id part, not the resource type
sourceId = sourceId.replace(resourceType + "/", "");
sourceIds.add(sourceId);
if (!sourceId.contains("/")) {
sourceId = resourceType + "/" + sourceId;
}
IIdType id = myFhirContext.getVersion().newIdType();
id.setValue(sourceId);
sourceIds.add(id);
});
Map<String, IResourcePersistentId> sourceIdToPid = new HashMap<>();
if (!sourceIds.isEmpty()) {
@ -166,15 +172,19 @@ public class MdmSurvivorshipSvcImpl implements IMdmSurvivorshipService {
myTransactionService
.withRequest(new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions()))
.execute(() -> {
Map<String, ? extends IResourcePersistentId> ids =
myIIdHelperService.resolveResourcePersistentIds(
RequestPartitionId.allPartitions(), resourceType, sourceIds);
sourceIdToPid.putAll(ids);
Map<IIdType, ? extends IResourceLookup<?>> ids = myIIdHelperService.resolveResourceIdentities(
RequestPartitionId.allPartitions(),
sourceIds,
ResolveIdentityMode.includeDeleted().cacheOk());
for (Map.Entry<IIdType, ? extends IResourceLookup<?>> entry : ids.entrySet()) {
sourceIdToPid.put(
entry.getKey().getIdPart(), entry.getValue().getPersistentId());
}
});
}
return sourceIds.stream().map(id -> {
IResourcePersistentId<?> pid = sourceIdToPid.get(id);
IResourcePersistentId<?> pid = sourceIdToPid.get(id.getIdPart());
return dao.readByPid(pid);
});
}

Some files were not shown because too many files have changed in this diff Show More