merge master

This commit is contained in:
Ken Stevens 2024-12-02 12:47:36 -05:00
commit 551e1a5d63
98 changed files with 3838 additions and 1029 deletions

View File

@ -153,7 +153,7 @@ class DefaultProfileValidationSupportBundleStrategy implements IValidationSuppor
if (myCodeSystems != null) {
retVal.addAll(myCodeSystems.values());
}
if (myStructureDefinitionResources != null) {
if (myStructureDefinitions != null) {
retVal.addAll(myStructureDefinitions.values());
}
if (myValueSets != null) {

View File

@ -19,9 +19,22 @@
*/
package ca.uhn.fhir.rest.param;
import java.util.Collections;
import java.util.Map;
public class HistorySearchDateRangeParam extends DateRangeParam {
/**
* Constructor
*
* @since 8.0.0
*/
public HistorySearchDateRangeParam() {
this(Collections.emptyMap(), new DateRangeParam(), null);
}
/**
* Constructor
*/
public HistorySearchDateRangeParam(
Map<String, String[]> theParameters, DateRangeParam theDateRange, Integer theOffset) {
super(theDateRange);

View File

@ -29,7 +29,7 @@ public class FileUtil {
if (theBytes <= 0) {
return "0 " + UNITS[0];
}
int digitGroups = (int) (Math.log10(theBytes) / Math.log10(1024));
int digitGroups = (int) (Math.log10((double) theBytes) / Math.log10(1024));
digitGroups = Math.min(digitGroups, UNITS.length - 1);
return new DecimalFormat("###0.#").format(theBytes / Math.pow(1024, digitGroups)) + " " + UNITS[digitGroups];
}

View File

@ -0,0 +1,7 @@
---
type: fix
issue: 6409
title: "When performing a `_history` query using the `_at` parameter, the time value
is now converted to a zoned-date before being passed to the database. This should
avoid conflicts around date changes on some databases.
"

View File

@ -0,0 +1,7 @@
---
type: perf
issue: 6409
title: "When searching in versioned tag mode, the JPA server now avoids a redundant
lookup of the un-versioned tags, avoiding an extra unnecessary database query
in some cases.
"

View File

@ -0,0 +1,11 @@
---
type: perf
issue: 6409
title: "The JPA server will no longer use the HFJ_RES_VER_PROV table to store and index values from
the `Resource.meta.source` element. Beginning in HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), a
new pair of columns have been used to store data for this element, so this change only affects
data which was stored in HAPI FHIR prior to version 6.8.0 (released August 2023). If you have
FHIR resources which were stored in a JPA server prior to this version, and you use the
Resource.meta.source element and/or the `_source` search parameter, you should perform a complete
reindex of your server to ensure that data is not lost. See the upgrade notes for more information.
"

View File

@ -1,4 +1,20 @@
# Upgrade Notes
The JPA server stores values for the field `Resource.meta.source` in dedicated columns in its database so that they can be indexed and searched for as needed, using the `_source` Search Parameter.
Prior to HAPI FHIR 6.8.0 (and Smile CDR 2023.08.R01), these values were stored in a dedicated table called `HFJ_RES_VER_PROV`. Beginning in HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01), two new columns were added to the `HFJ_RES_VER`
table which store the same data and make it available for searches.
As of HAPI FHIR 8.0.0, the legacy table is no longer searched by default. If you do not have Resource.meta.source data stored in HAPI FHIR that was last created/updated prior to version 6.8.0, this change will not affect you and no action needs to be taken.
If you do have such data, you should take the following steps:
* Enable the JpaStorageSettings setting `setAccessMetaSourceInformationFromProvenanceTable(true)` to configure the server to continue using the legacy table.
* Perform a server resource reindex by invoking the [$reindex Operation (server)](https://smilecdr.com/docs/fhir_repository/search_parameter_reindexing.html#reindex-server) with the `optimizeStorage` parameter set to `ALL_VERSIONS`.
* When this reindex operation has successfully completed, the setting above can be disabled. Disabling this setting avoids an extra database round-trip when loading data, so this change will have a positive performance impact on your server.
# Fulltext Search with _lastUpdated Filter
Fulltext searches have been updated to support `_lastUpdated` search parameter. A reindexing of Search Parameters
is required to migrate old data to support the `_lastUpdated` search parameter.
Fulltext searches have been updated to support the `_lastUpdated` search parameter. If you are using Advanced Hibernate Search indexing and wish to use the `_lastUpdated` search parameter with this feature, a full reindex of your repository is required.

View File

@ -121,11 +121,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@ -699,8 +700,15 @@ public class JpaConfig {
@Bean
@Scope("prototype")
public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchBuilder) {
return new SourcePredicateBuilder(theSearchBuilder);
public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchBuilder) {
return new ResourceHistoryPredicateBuilder(theSearchBuilder);
}
@Bean
@Scope("prototype")
public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder(
SearchQueryBuilder theSearchBuilder) {
return new ResourceHistoryProvenancePredicateBuilder(theSearchBuilder);
}
@Bean

View File

@ -29,7 +29,6 @@ import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@ -89,9 +88,6 @@ public class SearchConfig {
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private IResourceSearchViewDao myResourceSearchViewDao;
@Autowired
private FhirContext myContext;
@ -169,7 +165,6 @@ public class SearchConfig {
myInterceptorBroadcaster,
myResourceTagDao,
myDaoRegistry,
myResourceSearchViewDao,
myContext,
myIdHelperService,
theResourceType);

View File

@ -57,7 +57,6 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -561,8 +560,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
} else {
ResourceHistoryTable currentHistoryVersion = theEntity.getCurrentVersionEntity();
if (currentHistoryVersion == null) {
currentHistoryVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getId(), theEntity.getVersion());
currentHistoryVersion =
myResourceHistoryTableDao.findForIdAndVersion(theEntity.getId(), theEntity.getVersion());
}
if (currentHistoryVersion == null || !currentHistoryVersion.hasResource()) {
changed = true;
@ -1083,7 +1082,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
*/
if (thePerformIndexing) {
if (newParams == null) {
myExpungeService.deleteAllSearchParams(JpaPid.fromId(entity.getId()));
myExpungeService.deleteAllSearchParams(entity.getPersistentId());
entity.clearAllParamsPopulated();
} else {
@ -1315,8 +1314,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
* this could return null if the current resourceVersion has been expunged
* in which case we'll still create a new one
*/
historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getResourceId(), resourceVersion - 1);
historyEntry =
myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), resourceVersion - 1);
if (historyEntry != null) {
reusingHistoryEntity = true;
theEntity.populateHistoryEntityVersionAndDates(historyEntry);
@ -1374,29 +1373,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
boolean haveSource = isNotBlank(source) && shouldStoreSource;
boolean haveRequestId = isNotBlank(requestId) && shouldStoreRequestId;
if (haveSource || haveRequestId) {
ResourceHistoryProvenanceEntity provenance = null;
if (reusingHistoryEntity) {
/*
* If version history is disabled, then we may be reusing
* a previous history entity. If that's the case, let's try
* to reuse the previous provenance entity too.
*/
provenance = historyEntry.getProvenance();
}
if (provenance == null) {
provenance = historyEntry.toProvenance();
}
provenance.setResourceHistoryTable(historyEntry);
provenance.setResourceTable(theEntity);
provenance.setPartitionId(theEntity.getPartitionId());
if (haveRequestId) {
String persistedRequestId = left(requestId, Constants.REQUEST_ID_LENGTH);
provenance.setRequestId(persistedRequestId);
historyEntry.setRequestId(persistedRequestId);
}
if (haveSource) {
String persistedSource = left(source, ResourceHistoryTable.SOURCE_URI_LENGTH);
provenance.setSourceUri(persistedSource);
historyEntry.setSourceUri(persistedSource);
}
if (theResource != null) {
@ -1406,8 +1388,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
shouldStoreRequestId ? requestId : null,
theResource);
}
myEntityManager.persist(provenance);
}
}

View File

@ -44,6 +44,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@ -52,6 +53,7 @@ import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
@ -206,6 +208,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
@Autowired
private IJobCoordinator myJobCoordinator;
@Autowired
private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
private IInstanceValidatorModule myInstanceValidator;
private String myResourceName;
private Class<T> myResourceType;
@ -562,7 +567,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
thePerformIndexing);
// Store the resource forced ID if necessary
JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId());
JpaPid jpaPid = updatedEntity.getPersistentId();
// Populate the resource with its actual final stored ID from the entity
theResource.setId(entity.getIdDt());
@ -570,9 +575,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// Pre-cache the resource ID
jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext));
String fhirId = entity.getFhirId();
if (fhirId == null) {
fhirId = Long.toString(entity.getId());
}
assert fhirId != null;
myIdHelperService.addResolvedPidToFhirIdAfterCommit(
jpaPid, theRequestPartitionId, getResourceName(), fhirId, null);
theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
@ -1016,7 +1019,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
protected ResourceTable updateEntityForDelete(
RequestDetails theRequest, TransactionDetails theTransactionDetails, ResourceTable theEntity) {
myResourceSearchUrlSvc.deleteByResId(theEntity.getId());
myResourceSearchUrlSvc.deleteByResId(theEntity.getPersistentId());
Date updateTime = new Date();
return updateEntity(theRequest, null, theEntity, updateTime, true, true, theTransactionDetails, false, true);
}
@ -1261,7 +1264,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return myPersistedJpaBundleProviderFactory.history(
theRequest,
myResourceName,
entity.getId(),
entity.getPersistentId(),
theSince,
theUntil,
theOffset,
@ -1291,7 +1294,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
return myPersistedJpaBundleProviderFactory.history(
theRequest,
myResourceName,
entity.getId(),
JpaPid.fromId(entity.getId()),
theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
theHistorySearchDateRangeParam.getOffset(),
@ -1391,8 +1394,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
doMetaAdd(theMetaAdd, latestVersion, theRequest, transactionDetails);
// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
entity.getId(), entity.getVersion());
ResourceHistoryTable history =
myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
doMetaAdd(theMetaAdd, history, theRequest, transactionDetails);
}
@ -1439,8 +1442,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
} else {
doMetaDelete(theMetaDel, latestVersion, theRequest, transactionDetails);
// Also update history entry
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
entity.getId(), entity.getVersion());
ResourceHistoryTable history =
myResourceHistoryTableDao.findForIdAndVersion(entity.getId(), entity.getVersion());
doMetaDelete(theMetaDel, history, theRequest, transactionDetails);
}
@ -1705,7 +1708,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
ResourceTable entity, ReindexParameters.OptimizeStorageModeEnum theOptimizeStorageMode) {
ResourceHistoryTable historyEntity = entity.getCurrentVersionEntity();
if (historyEntity != null) {
reindexOptimizeStorageHistoryEntity(entity, historyEntity);
reindexOptimizeStorageHistoryEntityThenDetachIt(entity, historyEntity);
if (theOptimizeStorageMode == ReindexParameters.OptimizeStorageModeEnum.ALL_VERSIONS) {
int pageSize = 100;
for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
@ -1715,38 +1718,43 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
// different pages as the underlying data gets updated.
PageRequest pageRequest = PageRequest.of(page, pageSize, Sort.by("myId"));
Slice<ResourceHistoryTable> historyEntities =
myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(
myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid(
pageRequest, entity.getId(), historyEntity.getVersion());
for (ResourceHistoryTable next : historyEntities) {
reindexOptimizeStorageHistoryEntity(entity, next);
reindexOptimizeStorageHistoryEntityThenDetachIt(entity, next);
}
}
}
}
}
private void reindexOptimizeStorageHistoryEntity(ResourceTable entity, ResourceHistoryTable historyEntity) {
boolean changed = false;
/**
* Note that the entity will be detached after being saved if it has changed
* in order to avoid growing the number of resources in memory to be too big
*/
private void reindexOptimizeStorageHistoryEntityThenDetachIt(
ResourceTable entity, ResourceHistoryTable historyEntity) {
if (historyEntity.getEncoding() == ResourceEncodingEnum.JSONC
|| historyEntity.getEncoding() == ResourceEncodingEnum.JSON) {
byte[] resourceBytes = historyEntity.getResource();
if (resourceBytes != null) {
String resourceText = decodeResource(resourceBytes, historyEntity.getEncoding());
if (myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText)) {
changed = true;
}
myResourceHistoryCalculator.conditionallyAlterHistoryEntity(entity, historyEntity, resourceText);
}
}
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
if (historyEntity.getProvenance() != null) {
historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri());
historyEntity.setRequestId(historyEntity.getProvenance().getRequestId());
changed = true;
Long id = historyEntity.getId();
Optional<ResourceHistoryProvenanceEntity> provenanceEntityOpt =
myResourceHistoryProvenanceDao.findById(id);
if (provenanceEntityOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get();
historyEntity.setSourceUri(provenanceEntity.getSourceUri());
historyEntity.setRequestId(provenanceEntity.getRequestId());
myResourceHistoryProvenanceDao.delete(provenanceEntity);
}
}
if (changed) {
myResourceHistoryTableDao.save(historyEntity);
}
}

View File

@ -206,7 +206,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
* However, for realistic average workloads, this should reduce the number of round trips.
*/
if (!idChunk.isEmpty()) {
List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);
List<ResourceTable> entityChunk = prefetchResourceTableAndHistory(idChunk);
if (thePreFetchIndexes) {
@ -244,14 +244,13 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
}
@Nonnull
private List<ResourceTable> prefetchResourceTableHistoryAndProvenance(List<Long> idChunk) {
private List<ResourceTable> prefetchResourceTableAndHistory(List<Long> idChunk) {
assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked";
Query query = myEntityManager.createQuery("select r, h "
+ " FROM ResourceTable r "
+ " LEFT JOIN fetch ResourceHistoryTable h "
+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
+ " left join fetch h.myProvenance "
+ " WHERE r.myId IN ( :IDS ) ");
query.setParameter("IDS", idChunk);

View File

@ -219,7 +219,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
// indicate param was already processed, otherwise queries DB to process it
theParams.setOffset(null);
return SearchQueryExecutors.from(longs);
return SearchQueryExecutors.from(JpaPid.fromLongList(longs));
}
private int getMaxFetchSize(SearchParameterMap theParams, Integer theMax) {
@ -386,7 +386,6 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@SuppressWarnings("rawtypes")
private List<IResourcePersistentId> toList(ISearchQueryExecutor theSearchResultStream, long theMaxSize) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theSearchResultStream, 0), false)
.map(JpaPid::fromId)
.limit(theMaxSize)
.collect(Collectors.toList());
}

View File

@ -40,7 +40,6 @@ import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Expression;
import jakarta.persistence.criteria.JoinType;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.criteria.Subquery;
@ -125,8 +124,6 @@ public class HistoryBuilder {
addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from, theHistorySearchStyle);
from.fetch("myProvenance", JoinType.LEFT);
/*
* The sort on myUpdated is the important one for _history operations, but there are
* cases where multiple pages of results all have the exact same myUpdated value (e.g.

View File

@ -19,15 +19,15 @@
*/
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import jakarta.annotation.Nullable;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.Collection;
public interface IJpaStorageResourceParser extends IStorageResourceParser {
public interface IJpaStorageResourceParser extends IStorageResourceParser<JpaPid> {
/**
* Convert a storage entity into a FHIR resource model instance. This method may return null if the entity is not
@ -36,7 +36,7 @@ public interface IJpaStorageResourceParser extends IStorageResourceParser {
<R extends IBaseResource> R toResource(
Class<R> theResourceType,
IBaseResourceEntity theEntity,
Collection<ResourceTag> theTagList,
Collection<BaseTag> theTagList,
boolean theForHistoryOperation);
/**

View File

@ -26,9 +26,9 @@ import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@ -37,9 +37,9 @@ import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
@ -71,12 +71,13 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.decodeResource;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@ -92,6 +93,9 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
@Autowired
private PartitionSettings myPartitionSettings;
@ -115,14 +119,14 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
public <R extends IBaseResource> R toResource(
Class<R> theResourceType,
IBaseResourceEntity theEntity,
Collection<ResourceTag> theTagList,
Collection<BaseTag> theTagList,
boolean theForHistoryOperation) {
// 1. get resource, it's encoding and the tags if any
byte[] resourceBytes;
String resourceText;
ResourceEncodingEnum resourceEncoding;
@Nullable Collection<? extends BaseTag> tagList = Collections.emptyList();
@Nullable Collection<? extends BaseTag> tagList;
long version;
String provenanceSourceUri = null;
String provenanceRequestId = null;
@ -132,6 +136,12 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
resourceBytes = history.getResource();
resourceText = history.getResourceTextVc();
resourceEncoding = history.getEncoding();
// For search results we get the list of tags passed in because we load it
// in bulk for all resources we're going to return, but for read results
// we don't get the list passed in so we need to load it here.
tagList = theTagList;
if (tagList == null) {
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
default:
@ -147,10 +157,21 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
case INLINE:
tagList = null;
}
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
provenanceSourceUri = history.getSourceUri();
provenanceRequestId = history.getRequestId();
if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
myResourceHistoryProvenanceDao.findById(history.getId());
if (provenanceOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
provenanceRequestId = provenance.getRequestId();
provenanceSourceUri = provenance.getSourceUri();
}
}
}
} else if (theEntity instanceof ResourceTable) {
ResourceTable resource = (ResourceTable) theEntity;
@ -159,14 +180,13 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
history = resource.getCurrentVersionEntity();
} else {
version = theEntity.getVersion();
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theEntity.getId(), version);
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
((ResourceTable) theEntity).setCurrentVersionEntity(history);
while (history == null) {
if (version > 1L) {
version--;
history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theEntity.getId(), version);
history = myResourceHistoryTableDao.findForIdAndVersion(theEntity.getResourceId(), version);
} else {
return null;
}
@ -181,36 +201,28 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
case NON_VERSIONED:
if (resource.isHasTags()) {
tagList = resource.getTags();
} else {
tagList = List.of();
}
break;
case INLINE:
default:
tagList = null;
break;
}
version = history.getVersion();
if (history.getProvenance() != null) {
provenanceRequestId = history.getProvenance().getRequestId();
provenanceSourceUri = history.getProvenance().getSourceUri();
provenanceSourceUri = history.getSourceUri();
provenanceRequestId = history.getRequestId();
if (isBlank(provenanceSourceUri) && isBlank(provenanceRequestId)) {
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
myResourceHistoryProvenanceDao.findById(history.getId());
if (provenanceOpt.isPresent()) {
ResourceHistoryProvenanceEntity provenance = provenanceOpt.get();
provenanceRequestId = provenance.getRequestId();
provenanceSourceUri = provenance.getSourceUri();
}
} else if (theEntity instanceof ResourceSearchView) {
// This is the search View
ResourceSearchView view = (ResourceSearchView) theEntity;
resourceBytes = view.getResource();
resourceText = view.getResourceTextVc();
resourceEncoding = view.getEncoding();
version = view.getVersion();
provenanceRequestId = view.getProvenanceRequestId();
provenanceSourceUri = view.getProvenanceSourceUri();
switch (myStorageSettings.getTagStorageMode()) {
case VERSIONED:
case NON_VERSIONED:
if (theTagList != null) {
tagList = theTagList;
}
break;
case INLINE:
tagList = null;
break;
}
} else {
// something wrong
@ -277,7 +289,7 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
} else if (theResourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = new TolerantJsonParser(
getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());
getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getResourceId());
try {
retVal = parser.parseResource(theResourceType, theDecodedResourceText);
@ -519,8 +531,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
theResourceTarget.setId(id);
if (theResourceTarget instanceof IResource) {
ResourceMetadataKeyEnum.VERSION.put((IResource) theResourceTarget, id.getVersionIdPart());
ResourceMetadataKeyEnum.UPDATED.put((IResource) theResourceTarget, theEntitySource.getUpdated());
ResourceMetadataKeyEnum.VERSION.put(theResourceTarget, id.getVersionIdPart());
ResourceMetadataKeyEnum.UPDATED.put(theResourceTarget, theEntitySource.getUpdated());
} else {
IBaseMetaType meta = theResourceTarget.getMeta();
meta.setVersionId(id.getVersionIdPart());

View File

@ -38,10 +38,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId ORDER BY t.myResourceVersion ASC")
List<ResourceHistoryTable> findAllVersionsForResourceIdInOrder(@Param("resId") Long theId);
@Query(
"SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersionAndFetchProvenance(
@Param("id") long theId, @Param("version") long theVersion);
@Query("SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :id AND t.myResourceVersion = :version")
ResourceHistoryTable findForIdAndVersion(@Param("id") long theId, @Param("version") long theVersion);
@Query(
"SELECT t.myId FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
@ -49,8 +47,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query(
"SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(
"SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(
Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
@Query("" + "SELECT v.myId FROM ResourceHistoryTable v "
@ -91,4 +89,10 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
@Query(
"UPDATE ResourceHistoryTable r SET r.myResourceTextVc = null, r.myResource = :text, r.myEncoding = 'JSONC' WHERE r.myId = :pid")
void updateNonInlinedContents(@Param("text") byte[] theText, @Param("pid") long thePid);
@Query("SELECT v FROM ResourceHistoryTable v " + "JOIN FETCH v.myResourceTable t "
+ "WHERE v.myResourceId IN (:pids) "
+ "AND t.myVersion = v.myResourceVersion")
List<ResourceHistoryTable> findCurrentVersionsByResourcePidsAndFetchResourceTable(
@Param("pids") List<Long> theVersionlessPids);
}

View File

@ -25,9 +25,15 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.Collection;
public interface IResourceHistoryTagDao extends JpaRepository<ResourceHistoryTag, Long>, IHapiFhirJpaRepository {
@Modifying
@Query("DELETE FROM ResourceHistoryTag t WHERE t.myResourceHistoryPid = :historyPid")
void deleteByPid(@Param("historyPid") Long theResourceHistoryTablePid);
@Query(
"SELECT t FROM ResourceHistoryTag t INNER JOIN FETCH t.myTag WHERE t.myResourceHistory.myId IN (:historyPids)")
Collection<ResourceHistoryTag> findByVersionIds(@Param("historyPids") Collection<Long> theIdList);
}

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.dao.data;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
@ -28,6 +29,7 @@ import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@ -55,4 +57,16 @@ public interface ISearchResultDao extends JpaRepository<SearchResult, Long>, IHa
@Query("SELECT count(r) FROM SearchResult r WHERE r.mySearchPid = :search")
int countForSearch(@Param("search") Long theSearchPid);
/**
* Converts a response from {@link #findWithSearchPid(Long, Pageable)} to
* a List of JpaPid objects
*/
static List<JpaPid> toJpaPidList(List<Long> theArrays) {
List<JpaPid> retVal = new ArrayList<>(theArrays.size());
for (Long next : theArrays) {
retVal.add(JpaPid.fromId(next));
}
return retVal;
}
}

View File

@ -321,10 +321,12 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
nativeQuery.setMaxResults(800);
List pids = nativeQuery.getResultList();
if (!pids.isEmpty()) {
nativeQuery = myEntityManager.createQuery("DELETE FROM " + theEntityType.getSimpleName()
+ " WHERE " + idProperty + " IN (:pids)");
nativeQuery.setParameter("pids", pids);
nativeQuery.executeUpdate();
}
return pids.size();
});

View File

@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
@ -71,6 +72,7 @@ import org.springframework.transaction.support.TransactionSynchronizationManager
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
@Service
@ -159,8 +161,7 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
Slice<Long> ids;
if (theJpaPid != null && theJpaPid.getId() != null) {
if (theJpaPid.getVersion() != null) {
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
theJpaPid.getId(), theJpaPid.getVersion()));
ids = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theJpaPid.getId(), theJpaPid.getVersion()));
} else {
ids = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResourceId(page, theJpaPid.getId());
}
@ -239,9 +240,10 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
callHooks(theRequestDetails, theRemainingCount, version, id);
if (version.getProvenance() != null) {
myResourceHistoryProvenanceTableDao.deleteByPid(
version.getProvenance().getId());
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
myResourceHistoryProvenanceTableDao.findById(theNextVersionId);
provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId()));
}
myResourceHistoryTagDao.deleteByPid(version.getId());
@ -302,8 +304,8 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
RequestDetails theRequestDetails, Long theResourceId, AtomicInteger theRemainingCount) {
ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalStateException::new);
ResourceHistoryTable currentVersion = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(
resource.getId(), resource.getVersion());
ResourceHistoryTable currentVersion =
myResourceHistoryTableDao.findForIdAndVersion(resource.getId(), resource.getVersion());
if (currentVersion != null) {
expungeHistoricalVersion(theRequestDetails, currentVersion.getId(), theRemainingCount);
}

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.dao.search;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import org.hibernate.search.engine.search.query.SearchScroll;
import org.hibernate.search.engine.search.query.SearchScrollResult;
@ -57,12 +58,12 @@ public class SearchScrollQueryExecutorAdaptor implements ISearchQueryExecutor {
}
@Override
public Long next() {
public JpaPid next() {
Long result = myCurrentIterator.next();
// was this the last in the current scroll page?
if (!myCurrentIterator.hasNext()) {
advanceNextScrollPage();
}
return result;
return JpaPid.fromId(result);
}
}

View File

@ -1,244 +0,0 @@
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.entity;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.Constants;
import jakarta.annotation.Nullable;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.Lob;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import org.hibernate.annotations.Immutable;
import org.hibernate.annotations.Subselect;
import java.io.Serializable;
import java.util.Date;
@SuppressWarnings("SqlDialectInspection")
@Entity
@Immutable
// Ideally, all tables and columns should be in UPPERCASE if we ever choose to use a case-sensitive collation for MSSQL
// and there's a risk that queries on lowercase database objects fail.
@Subselect("SELECT h.PID as PID, "
+ " r.RES_ID as RES_ID, "
+ " h.RES_TYPE as RES_TYPE, "
+ " h.RES_VERSION as RES_VERSION, "
// FHIR version
+ " h.RES_VER as RES_VER, "
// resource version
+ " h.HAS_TAGS as HAS_TAGS, "
+ " h.RES_DELETED_AT as RES_DELETED_AT, "
+ " h.RES_PUBLISHED as RES_PUBLISHED, "
+ " h.RES_UPDATED as RES_UPDATED, "
+ " h.RES_TEXT as RES_TEXT, "
+ " h.RES_TEXT_VC as RES_TEXT_VC, "
+ " h.RES_ENCODING as RES_ENCODING, "
+ " h.PARTITION_ID as PARTITION_ID, "
+ " p.SOURCE_URI as PROV_SOURCE_URI,"
+ " p.REQUEST_ID as PROV_REQUEST_ID,"
+ " r.FHIR_ID as FHIR_ID "
+ "FROM HFJ_RESOURCE r "
+ " INNER JOIN HFJ_RES_VER h ON r.RES_ID = h.RES_ID and r.RES_VER = h.RES_VER"
+ " LEFT OUTER JOIN HFJ_RES_VER_PROV p ON p.RES_VER_PID = h.PID ")
public class ResourceSearchView implements IBaseResourceEntity, Serializable {
private static final long serialVersionUID = 1L;
@Id
@Column(name = "PID")
private Long myId;
@Column(name = "RES_ID")
private Long myResourceId;
@Column(name = "RES_TYPE", length = Constants.MAX_RESOURCE_NAME_LENGTH)
private String myResourceType;
@Column(name = "RES_VERSION")
@Enumerated(EnumType.STRING)
private FhirVersionEnum myFhirVersion;
@Column(name = "RES_VER")
private Long myResourceVersion;
@Column(name = "PROV_REQUEST_ID", length = Constants.REQUEST_ID_LENGTH)
private String myProvenanceRequestId;
@Column(name = "PROV_SOURCE_URI", length = ResourceHistoryTable.SOURCE_URI_LENGTH)
private String myProvenanceSourceUri;
@Column(name = "HAS_TAGS")
private boolean myHasTags;
@Column(name = "RES_DELETED_AT")
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED")
private Date myPublished;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED")
private Date myUpdated;
@Column(name = "RES_TEXT")
@Lob()
private byte[] myResource;
@Column(name = "RES_TEXT_VC")
private String myResourceTextVc;
@Column(name = "RES_ENCODING")
@Enumerated(EnumType.STRING)
private ResourceEncodingEnum myEncoding;
@Column(name = "FHIR_ID", length = ResourceTable.MAX_FORCED_ID_LENGTH)
private String myFhirId;
@Column(name = "PARTITION_ID")
private Integer myPartitionId;
public ResourceSearchView() {
// public constructor for Hibernate
}
public String getResourceTextVc() {
return myResourceTextVc;
}
public String getProvenanceRequestId() {
return myProvenanceRequestId;
}
public String getProvenanceSourceUri() {
return myProvenanceSourceUri;
}
@Override
public Date getDeleted() {
return myDeleted;
}
public void setDeleted(Date theDate) {
myDeleted = theDate;
}
@Override
public FhirVersionEnum getFhirVersion() {
return myFhirVersion;
}
public void setFhirVersion(FhirVersionEnum theFhirVersion) {
myFhirVersion = theFhirVersion;
}
public String getFhirId() {
return myFhirId;
}
@Override
public Long getId() {
return myResourceId;
}
@Override
public IdDt getIdDt() {
if (myFhirId == null) {
Long id = myResourceId;
return new IdDt(myResourceType + '/' + id + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
} else {
return new IdDt(getResourceType() + '/' + getFhirId() + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
}
}
@Override
public InstantDt getPublished() {
if (myPublished != null) {
return new InstantDt(myPublished);
} else {
return null;
}
}
public void setPublished(Date thePublished) {
myPublished = thePublished;
}
@Override
public Long getResourceId() {
return myResourceId;
}
@Override
public String getResourceType() {
return myResourceType;
}
@Override
public InstantDt getUpdated() {
return new InstantDt(myUpdated);
}
@Override
public Date getUpdatedDate() {
return myUpdated;
}
@Override
public long getVersion() {
return myResourceVersion;
}
@Override
public boolean isHasTags() {
return myHasTags;
}
@Override
@Nullable
public PartitionablePartitionId getPartitionId() {
if (myPartitionId != null) {
return new PartitionablePartitionId(myPartitionId, null);
} else {
return null;
}
}
public byte[] getResource() {
return myResource;
}
public ResourceEncodingEnum getEncoding() {
return myEncoding;
}
}

View File

@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.partition;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import org.apache.commons.lang3.Validate;
@ -36,17 +37,27 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
@Autowired
IPartitionLookupSvc myPartitionConfigSvc;
@Autowired
PartitionSettings myPartitionSettings;
public RequestPartitionHelperSvc() {}
@Override
public RequestPartitionId validateAndNormalizePartitionIds(RequestPartitionId theRequestPartitionId) {
List<String> names = null;
List<Integer> partitionIds = null;
for (int i = 0; i < theRequestPartitionId.getPartitionIds().size(); i++) {
PartitionEntity partition;
Integer id = theRequestPartitionId.getPartitionIds().get(i);
if (id == null) {
partition = null;
if (myPartitionSettings.getDefaultPartitionId() != null) {
if (partitionIds == null) {
partitionIds = new ArrayList<>(theRequestPartitionId.getPartitionIds());
}
partitionIds.set(i, myPartitionSettings.getDefaultPartitionId());
}
} else {
try {
partition = myPartitionConfigSvc.getPartitionById(id);
@ -88,8 +99,12 @@ public class RequestPartitionHelperSvc extends BaseRequestPartitionHelperSvc {
}
if (names != null) {
List<Integer> partitionIdsToUse = theRequestPartitionId.getPartitionIds();
if (partitionIds != null) {
partitionIdsToUse = partitionIds;
}
return RequestPartitionId.forPartitionIdsAndNames(
names, theRequestPartitionId.getPartitionIds(), theRequestPartitionId.getPartitionDate());
names, partitionIdsToUse, theRequestPartitionId.getPartitionDate());
}
return theRequestPartitionId;

View File

@ -24,11 +24,13 @@ import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.HistorySearchStyleEnum;
import jakarta.annotation.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
@ -56,7 +58,7 @@ public class PersistedJpaBundleProviderFactory {
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(
RequestDetails theRequestDetails,
SearchTask theTask,
ISearchBuilder theSearchBuilder,
ISearchBuilder<JpaPid> theSearchBuilder,
RequestPartitionId theRequestPartitionId) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(
JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER,
@ -69,7 +71,7 @@ public class PersistedJpaBundleProviderFactory {
public IBundleProvider history(
RequestDetails theRequest,
String theResourceType,
Long theResourcePid,
@Nullable JpaPid theResourcePid,
Date theRangeStartInclusive,
Date theRangeEndInclusive,
Integer theOffset,
@ -88,7 +90,7 @@ public class PersistedJpaBundleProviderFactory {
public IBundleProvider history(
RequestDetails theRequest,
String theResourceType,
Long theResourcePid,
@Nullable JpaPid theResourcePid,
Date theRangeStartInclusive,
Date theRangeEndInclusive,
Integer theOffset,
@ -103,7 +105,9 @@ public class PersistedJpaBundleProviderFactory {
search.setLastUpdated(theRangeStartInclusive, theRangeEndInclusive);
search.setUuid(UUID.randomUUID().toString());
search.setResourceType(resourceName);
search.setResourceId(theResourcePid);
if (theResourcePid != null) {
search.setResourceId(theResourcePid.getId());
}
search.setSearchType(SearchTypeEnum.HISTORY);
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);

View File

@ -23,6 +23,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -81,8 +82,8 @@ public class ResourceSearchUrlSvc {
* Once a resource is updated or deleted, we can trust that future match checks will find the committed resource in the db.
* The use of the constraint table is done, and we can delete it to keep the table small.
*/
public void deleteByResId(long theResId) {
myResourceSearchUrlDao.deleteByResId(theResId);
public void deleteByResId(JpaPid theResId) {
myResourceSearchUrlDao.deleteByResId(theResId.getId());
}
public void deleteByResIds(List<Long> theResIds) {

View File

@ -19,10 +19,12 @@
*/
package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import java.io.Closeable;
import java.util.Iterator;
public interface ISearchQueryExecutor extends Iterator<Long>, Closeable {
public interface ISearchQueryExecutor extends Iterator<JpaPid>, Closeable {
/**
* Narrow the signature - no IOException allowed.
*/

View File

@ -0,0 +1,47 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import org.springframework.jdbc.core.RowMapper;
import java.sql.ResultSet;
import java.sql.SQLException;
public class JpaPidRowMapper implements RowMapper<JpaPid> {
private final boolean mySelectPartitionId;
public JpaPidRowMapper(boolean theSelectPartitionId) {
mySelectPartitionId = theSelectPartitionId;
}
@Override
public JpaPid mapRow(ResultSet theResultSet, int theRowNum) throws SQLException {
if (mySelectPartitionId) {
Integer partitionId = theResultSet.getObject(1, Integer.class);
Long resourceId = theResultSet.getLong(2);
return JpaPid.fromId(resourceId, partitionId);
} else {
Long resourceId = theResultSet.getLong(1);
return JpaPid.fromId(resourceId);
}
}
}

View File

@ -28,6 +28,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
@ -44,13 +45,13 @@ import ca.uhn.fhir.jpa.search.builder.predicate.ComboUniqueSearchParameterPredic
import ca.uhn.fhir.jpa.search.builder.predicate.CoordsPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ICanMakeMissingParamPredicate;
import ca.uhn.fhir.jpa.search.builder.predicate.ISourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ParsedLocationParam;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@ -151,6 +152,7 @@ public class QueryStack {
private final PartitionSettings myPartitionSettings;
private final JpaStorageSettings myStorageSettings;
private final EnumSet<PredicateBuilderTypeEnum> myReusePredicateBuilderTypes;
private final RequestDetails myRequestDetails;
private Map<PredicateBuilderCacheKey, BaseJoiningPredicateBuilder> myJoinMap;
private Map<String, BaseJoiningPredicateBuilder> myParamNameToPredicateBuilderMap;
// used for _offset queries with sort, should be removed once the fix is applied to the async path too.
@ -161,6 +163,7 @@ public class QueryStack {
* Constructor
*/
public QueryStack(
RequestDetails theRequestDetails,
SearchParameterMap theSearchParameters,
JpaStorageSettings theStorageSettings,
FhirContext theFhirContext,
@ -168,6 +171,7 @@ public class QueryStack {
ISearchParamRegistry theSearchParamRegistry,
PartitionSettings thePartitionSettings) {
this(
theRequestDetails,
theSearchParameters,
theStorageSettings,
theFhirContext,
@ -181,6 +185,7 @@ public class QueryStack {
* Constructor
*/
private QueryStack(
RequestDetails theRequestDetails,
SearchParameterMap theSearchParameters,
JpaStorageSettings theStorageSettings,
FhirContext theFhirContext,
@ -188,6 +193,7 @@ public class QueryStack {
ISearchParamRegistry theSearchParamRegistry,
PartitionSettings thePartitionSettings,
EnumSet<PredicateBuilderTypeEnum> theReusePredicateBuilderTypes) {
myRequestDetails = theRequestDetails;
myPartitionSettings = thePartitionSettings;
assert theSearchParameters != null;
assert theStorageSettings != null;
@ -1035,7 +1041,6 @@ public class QueryStack {
searchParam,
Collections.singletonList(new UriParam(theFilter.getValue())),
theFilter.getOperation(),
theRequest,
theRequestPartitionId);
} else if (typeEnum == RestSearchParameterTypeEnum.STRING) {
return theQueryStack3.createPredicateString(
@ -1220,7 +1225,6 @@ public class QueryStack {
ResourceLinkPredicateBuilder resourceLinkTableJoin =
mySqlBuilder.addReferencePredicateBuilderReversed(this, theSourceJoinColumn);
Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId);
List<String> paths = resourceLinkTableJoin.createResourceLinkPaths(
targetResourceType, paramReference, new ArrayList<>());
@ -1242,8 +1246,13 @@ public class QueryStack {
.setRequest(theRequest)
.setRequestPartitionId(theRequestPartitionId));
if (myPartitionSettings.isPartitionIdsInPrimaryKeys()) {
andPredicates.add(toAndPredicate(pathPredicate, typePredicate, linkedPredicate));
} else {
Condition partitionPredicate = resourceLinkTableJoin.createPartitionIdPredicate(theRequestPartitionId);
andPredicates.add(toAndPredicate(partitionPredicate, pathPredicate, typePredicate, linkedPredicate));
}
}
return toAndPredicate(andPredicates);
}
@ -1889,7 +1898,6 @@ public class QueryStack {
theParamDefinition,
theOrValues,
theOperation,
theRequest,
theRequestPartitionId,
theSqlBuilder);
break;
@ -1954,13 +1962,13 @@ public class QueryStack {
.findFirst();
if (isMissingSourceOptional.isPresent()) {
SourcePredicateBuilder join =
ISourcePredicateBuilder join =
getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.LEFT_OUTER);
orPredicates.add(join.createPredicateMissingSourceUri());
return toOrPredicate(orPredicates);
}
// for all other cases we use "INNER JOIN" to match search parameters
SourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER);
ISourcePredicateBuilder join = getSourcePredicateBuilder(theSourceJoinColumn, SelectQuery.JoinType.INNER);
for (IQueryParameterType nextParameter : theList) {
SourceParam sourceParameter = new SourceParam(nextParameter.getValueAsQueryToken(myFhirContext));
@ -1980,13 +1988,22 @@ public class QueryStack {
return toOrPredicate(orPredicates);
}
private SourcePredicateBuilder getSourcePredicateBuilder(
private ISourcePredicateBuilder getSourcePredicateBuilder(
@Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
return createOrReusePredicateBuilder(
PredicateBuilderTypeEnum.SOURCE,
theSourceJoinColumn,
Constants.PARAM_SOURCE,
() -> mySqlBuilder.addSourcePredicateBuilder(theSourceJoinColumn, theJoinType))
() -> mySqlBuilder.addResourceHistoryProvenancePredicateBuilder(
theSourceJoinColumn, theJoinType))
.getResult();
}
return createOrReusePredicateBuilder(
PredicateBuilderTypeEnum.SOURCE,
theSourceJoinColumn,
Constants.PARAM_SOURCE,
() -> mySqlBuilder.addResourceHistoryPredicateBuilder(theSourceJoinColumn, theJoinType))
.getResult();
}
@ -2321,7 +2338,6 @@ public class QueryStack {
RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequestDetails,
RequestPartitionId theRequestPartitionId) {
return createPredicateUri(
theSourceJoinColumn,
@ -2330,7 +2346,6 @@ public class QueryStack {
theSearchParam,
theList,
theOperation,
theRequestDetails,
theRequestPartitionId,
mySqlBuilder);
}
@ -2342,7 +2357,6 @@ public class QueryStack {
RuntimeSearchParam theSearchParam,
List<? extends IQueryParameterType> theList,
SearchFilterParser.CompareOperation theOperation,
RequestDetails theRequestDetails,
RequestPartitionId theRequestPartitionId,
SearchQueryBuilder theSqlBuilder) {
@ -2361,13 +2375,14 @@ public class QueryStack {
} else {
UriPredicateBuilder join = theSqlBuilder.addUriPredicateBuilder(theSourceJoinColumn);
Condition predicate = join.addPredicate(theList, paramName, theOperation, theRequestDetails);
Condition predicate = join.addPredicate(theList, paramName, theOperation, myRequestDetails);
return join.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
}
}
public QueryStack newChildQueryFactoryWithFullBuilderReuse() {
return new QueryStack(
myRequestDetails,
mySearchParameters,
myStorageSettings,
myFhirContext,
@ -2452,7 +2467,6 @@ public class QueryStack {
*/
private Condition createPredicateResourcePID(
DbColumn[] theSourceJoinColumn, List<List<IQueryParameterType>> theAndOrParams) {
DbColumn pidColumn = getResourceIdColumn(theSourceJoinColumn);
if (pidColumn == null) {
@ -2662,7 +2676,6 @@ public class QueryStack {
nextParamDef,
nextAnd,
SearchFilterParser.CompareOperation.eq,
theRequest,
theRequestPartitionId));
}
break;
@ -2871,12 +2884,13 @@ public class QueryStack {
// expand out the pids
public void addPredicateEverythingOperation(
String theResourceName, List<String> theTypeSourceResourceNames, Long... theTargetPids) {
String theResourceName, List<String> theTypeSourceResourceNames, JpaPid... theTargetPids) {
ResourceLinkPredicateBuilder table = mySqlBuilder.addReferencePredicateBuilder(this, null);
Condition predicate =
table.createEverythingPredicate(theResourceName, theTypeSourceResourceNames, theTargetPids);
mySqlBuilder.addPredicate(predicate);
mySqlBuilder.getSelect().setIsDistinct(true);
addGrouping();
}
public IQueryParameterType newParameterInstance(

View File

@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate;
import java.util.Iterator;
import java.util.List;
public class SearchQueryExecutors {
@ -46,7 +45,7 @@ public class SearchQueryExecutors {
}
@Override
public Long next() {
public JpaPid next() {
myCount += 1;
return theExecutor.next();
}
@ -54,37 +53,7 @@ public class SearchQueryExecutors {
}
@Nonnull
public static ISearchQueryExecutor from(List<Long> rawPids) {
public static ISearchQueryExecutor from(List<JpaPid> rawPids) {
return new ResolvedSearchQueryExecutor(rawPids);
}
public static ISearchQueryExecutor from(Iterator<JpaPid> theIterator) {
return new JpaPidQueryAdaptor(theIterator);
}
public static ISearchQueryExecutor from(Iterable<JpaPid> theIterable) {
return new JpaPidQueryAdaptor(theIterable.iterator());
}
static class JpaPidQueryAdaptor implements ISearchQueryExecutor {
final Iterator<JpaPid> myIterator;
JpaPidQueryAdaptor(Iterator<JpaPid> theIterator) {
myIterator = theIterator;
}
@Override
public void close() {}
@Override
public boolean hasNext() {
return myIterator.hasNext();
}
@Override
public Long next() {
JpaPid next = myIterator.next();
return next == null ? null : next.getId();
}
}
}

View File

@ -19,6 +19,7 @@
*/
package ca.uhn.fhir.jpa.search.builder.models;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import jakarta.annotation.Nonnull;
@ -26,18 +27,18 @@ import java.util.Iterator;
import java.util.List;
public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor {
private final Iterator<Long> myIterator;
private final Iterator<JpaPid> myIterator;
public ResolvedSearchQueryExecutor(Iterable<Long> theIterable) {
public ResolvedSearchQueryExecutor(Iterable<JpaPid> theIterable) {
this(theIterable.iterator());
}
public ResolvedSearchQueryExecutor(Iterator<Long> theIterator) {
public ResolvedSearchQueryExecutor(Iterator<JpaPid> theIterator) {
myIterator = theIterator;
}
@Nonnull
public static ResolvedSearchQueryExecutor from(List<Long> rawPids) {
public static ResolvedSearchQueryExecutor from(List<JpaPid> rawPids) {
return new ResolvedSearchQueryExecutor(rawPids);
}
@ -47,7 +48,7 @@ public class ResolvedSearchQueryExecutor implements ISearchQueryExecutor {
}
@Override
public Long next() {
public JpaPid next() {
return myIterator.next();
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import com.healthmarketscience.sqlbuilder.Condition;
@ -31,6 +32,7 @@ import com.healthmarketscience.sqlbuilder.dbspec.basic.DbTable;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.Validate;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
@ -96,15 +98,16 @@ public abstract class BaseJoiningPredicateBuilder extends BasePredicateBuilder {
}
}
public Condition createPredicateResourceIds(boolean theInverse, List<Long> theResourceIds) {
public Condition createPredicateResourceIds(boolean theInverse, Collection<JpaPid> theResourceIds) {
Validate.notNull(theResourceIds, "theResourceIds must not be null");
// Handle the _id parameter by adding it to the tail
Condition inResourceIds =
QueryParameterUtils.toEqualToOrInPredicate(getResourceIdColumn(), generatePlaceholders(theResourceIds));
Condition inResourceIds = QueryParameterUtils.toEqualToOrInPredicate(
getResourceIdColumn(), generatePlaceholders(JpaPid.toLongList(theResourceIds)));
if (theInverse) {
inResourceIds = new NotCondition(inResourceIds);
}
// Handle the _id parameter by adding it to the tail
return inResourceIds;
}

View File

@ -17,17 +17,20 @@
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.dao.data;
package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.model.api.IQueryParameterType;
import com.healthmarketscience.sqlbuilder.Condition;
import java.util.Collection;
public interface ISourcePredicateBuilder {
public interface IResourceSearchViewDao extends JpaRepository<ResourceSearchView, Long>, IHapiFhirJpaRepository {
Condition createPredicateMissingSourceUri();
@Query("SELECT v FROM ResourceSearchView v WHERE v.myResourceId in (:pids)")
Collection<ResourceSearchView> findByResourceIds(@Param("pids") Collection<Long> pids);
Condition createPredicateSourceUri(String theSourceUri);
Condition createPredicateRequestId(String theRequestId);
Condition createPredicateSourceUriWithModifiers(
IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri);
}

View File

@ -40,7 +40,7 @@ import java.util.List;
import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression;
import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression;
public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder {
public class ResourceHistoryPredicateBuilder extends BaseJoiningPredicateBuilder implements ISourcePredicateBuilder {
private final DbColumn myColumnSourceUri;
private final DbColumn myColumnRequestId;
@ -49,10 +49,10 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder {
/**
* Constructor
*/
public SourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV"));
public ResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER"));
myResourceIdColumn = getTable().addColumn("RES_PID");
myResourceIdColumn = getTable().addColumn("RES_ID");
myColumnSourceUri = getTable().addColumn("SOURCE_URI");
myColumnRequestId = getTable().addColumn("REQUEST_ID");
}
@ -62,14 +62,17 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder {
return myResourceIdColumn;
}
@Override
public Condition createPredicateSourceUri(String theSourceUri) {
return BinaryCondition.equalTo(myColumnSourceUri, generatePlaceholder(theSourceUri));
}
@Override
public Condition createPredicateMissingSourceUri() {
return UnaryCondition.isNull(myColumnSourceUri);
}
@Override
public Condition createPredicateSourceUriWithModifiers(
IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) {
if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) {
@ -117,6 +120,7 @@ public class SourcePredicateBuilder extends BaseJoiningPredicateBuilder {
}
}
@Override
public Condition createPredicateRequestId(String theRequestId) {
return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId));
}

View File

@ -0,0 +1,128 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.search.builder.predicate;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.FunctionCall;
import com.healthmarketscience.sqlbuilder.UnaryCondition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
import java.util.List;
import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftAndRightMatchLikeExpression;
import static ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder.createLeftMatchLikeExpression;
/**
 * Predicate builder that joins the search query against the
 * HFJ_RES_VER_PROV (resource history provenance) table, producing SQL
 * predicates for the <code>_source</code> search parameter (source URI
 * and request ID columns).
 */
public class ResourceHistoryProvenancePredicateBuilder extends BaseJoiningPredicateBuilder
implements ISourcePredicateBuilder {
// Column holding the source URI value for the resource version
private final DbColumn myColumnSourceUri;
// Column holding the request ID value for the resource version
private final DbColumn myColumnRequestId;
// Resource PID column (RES_PID) used to join back to the resource table
private final DbColumn myResourceIdColumn;
/**
 * Constructor
 *
 * @param theSearchSqlBuilder the query builder this predicate builder attaches its table join to
 */
public ResourceHistoryProvenancePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
super(theSearchSqlBuilder, theSearchSqlBuilder.addTable("HFJ_RES_VER_PROV"));
myResourceIdColumn = getTable().addColumn("RES_PID");
myColumnSourceUri = getTable().addColumn("SOURCE_URI");
myColumnRequestId = getTable().addColumn("REQUEST_ID");
}
@Override
public DbColumn getResourceIdColumn() {
return myResourceIdColumn;
}
/** Builds an equality predicate on the SOURCE_URI column. */
@Override
public Condition createPredicateSourceUri(String theSourceUri) {
return BinaryCondition.equalTo(myColumnSourceUri, generatePlaceholder(theSourceUri));
}
/** Builds an IS NULL predicate on SOURCE_URI (used for :missing=true searches). */
@Override
public Condition createPredicateMissingSourceUri() {
return UnaryCondition.isNull(myColumnSourceUri);
}
/**
 * Builds a predicate on the SOURCE_URI column, honouring the
 * <code>:missing</code>, <code>:above</code>, <code>:below</code> and
 * <code>:contains</code> modifiers when present; falls back to an
 * equality match otherwise.
 *
 * @throws InvalidRequestException for an unsupported qualifier
 */
@Override
public Condition createPredicateSourceUriWithModifiers(
IQueryParameterType theQueryParameter, JpaStorageSettings theStorageSetting, String theSourceUri) {
// :missing=false means "any non-null source URI"
if (theQueryParameter.getMissing() != null && !theQueryParameter.getMissing()) {
return UnaryCondition.isNotNull(myColumnSourceUri);
} else if (theQueryParameter instanceof UriParam && theQueryParameter.getQueryParameterQualifier() != null) {
UriParam uriParam = (UriParam) theQueryParameter;
switch (uriParam.getQualifier()) {
case ABOVE:
return createPredicateSourceAbove(theSourceUri);
case BELOW:
return createPredicateSourceBelow(theSourceUri);
case CONTAINS:
return createPredicateSourceContains(theStorageSetting, theSourceUri);
default:
throw new InvalidRequestException(Msg.code(2569)
+ String.format(
"Unsupported qualifier specified, qualifier=%s",
theQueryParameter.getQueryParameterQualifier()));
}
} else {
return createPredicateSourceUri(theSourceUri);
}
}
// :above - equality against the candidate URIs produced by UrlUtil.getAboveUriCandidates(theSourceUri)
private Condition createPredicateSourceAbove(String theSourceUri) {
List<String> aboveUriCandidates = UrlUtil.getAboveUriCandidates(theSourceUri);
List<String> aboveUriPlaceholders = generatePlaceholders(aboveUriCandidates);
return QueryParameterUtils.toEqualToOrInPredicate(myColumnSourceUri, aboveUriPlaceholders);
}
// :below - left-anchored LIKE match on the source URI
private Condition createPredicateSourceBelow(String theSourceUri) {
String belowLikeExpression = createLeftMatchLikeExpression(theSourceUri);
return BinaryCondition.like(myColumnSourceUri, generatePlaceholder(belowLikeExpression));
}
// :contains - UPPER()-wrapped LIKE on a normalized value; only permitted when
// contains-searches are enabled in the storage settings
private Condition createPredicateSourceContains(JpaStorageSettings theStorageSetting, String theSourceUri) {
if (theStorageSetting.isAllowContainsSearches()) {
FunctionCall upperFunction = new FunctionCall("UPPER");
upperFunction.addCustomParams(myColumnSourceUri);
String normalizedString = StringUtil.normalizeStringForSearchIndexing(theSourceUri);
String containsLikeExpression = createLeftAndRightMatchLikeExpression(normalizedString);
return BinaryCondition.like(upperFunction, generatePlaceholder(containsLikeExpression));
} else {
throw new MethodNotAllowedException(Msg.code(2570) + ":contains modifier is disabled on this server");
}
}
/** Builds an equality predicate on the REQUEST_ID column. */
@Override
public Condition createPredicateRequestId(String theRequestId) {
return BinaryCondition.equalTo(myColumnRequestId, generatePlaceholder(theRequestId));
}
}

View File

@ -25,6 +25,8 @@ import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.dao.predicate.SearchFilterParser;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject;
import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.api.IQueryParameterType;
@ -129,29 +131,36 @@ public class ResourceIdPredicateBuilder extends BasePredicateBuilder {
assert operation == SearchFilterParser.CompareOperation.eq
|| operation == SearchFilterParser.CompareOperation.ne;
List<Long> resourceIds = JpaPid.toLongList(allOrPids);
if (theSourceJoinColumn == null) {
BaseJoiningPredicateBuilder queryRootTable = super.getOrCreateQueryRootTable(true);
Condition predicate;
switch (operation) {
default:
case eq:
predicate = queryRootTable.createPredicateResourceIds(false, resourceIds);
predicate = queryRootTable.createPredicateResourceIds(false, allOrPids);
break;
case ne:
predicate = queryRootTable.createPredicateResourceIds(true, resourceIds);
predicate = queryRootTable.createPredicateResourceIds(true, allOrPids);
break;
}
predicate = queryRootTable.combineWithRequestPartitionIdPredicate(theRequestPartitionId, predicate);
return predicate;
} else {
if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) {
ColumnTupleObject left = new ColumnTupleObject(theSourceJoinColumn);
JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), allOrPids);
return QueryParameterUtils.toInPredicate(
left, right, operation == SearchFilterParser.CompareOperation.ne);
} else {
DbColumn resIdColumn = getResourceIdColumn(theSourceJoinColumn);
List<Long> resourceIds = JpaPid.toLongList(allOrPids);
return QueryParameterUtils.toEqualToOrInPredicate(
resIdColumn,
generatePlaceholders(resourceIds),
operation == SearchFilterParser.CompareOperation.ne);
}
}
}
return null;
}

View File

@ -43,6 +43,8 @@ import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.search.builder.QueryStack;
import ca.uhn.fhir.jpa.search.builder.models.MissingQueryParameterPredicateParams;
import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject;
import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.ResourceMetaParams;
@ -65,6 +67,7 @@ import com.google.common.collect.Lists;
import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.InCondition;
import com.healthmarketscience.sqlbuilder.NotCondition;
import com.healthmarketscience.sqlbuilder.SelectQuery;
import com.healthmarketscience.sqlbuilder.UnaryCondition;
@ -79,7 +82,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
@ -814,14 +816,20 @@ public class ResourceLinkPredicateBuilder extends BaseJoiningPredicateBuilder im
@Nonnull
public Condition createEverythingPredicate(
String theResourceName, List<String> theSourceResourceNames, Long... theTargetPids) {
String theResourceName, List<String> theSourceResourceNames, JpaPid... theTargetPids) {
Condition condition;
if (theTargetPids != null && theTargetPids.length >= 1) {
// if resource ids are provided, we'll create the predicate
// with ids in or equal to this value
if (getSearchQueryBuilder().isIncludePartitionIdInJoins()) {
Object left = ColumnTupleObject.from(getJoinColumnsForTarget());
JpaPidValueTuples right = JpaPidValueTuples.from(getSearchQueryBuilder(), theTargetPids);
condition = new InCondition(left, right);
} else {
condition = QueryParameterUtils.toEqualToOrInPredicate(
myColumnTargetResourceId, generatePlaceholders(Arrays.asList(theTargetPids)));
myColumnTargetResourceId, generatePlaceholders(JpaPid.toLongList(theTargetPids)));
}
} else {
// ... otherwise we look for resource types
condition = BinaryCondition.equalTo(myColumnTargetResourceType, generatePlaceholder(theResourceName));

View File

@ -0,0 +1,87 @@
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.search.builder.sql;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import com.healthmarketscience.common.util.AppendableExt;
import com.healthmarketscience.sqlbuilder.Expression;
import com.healthmarketscience.sqlbuilder.ValidationContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
 * Outputs an SQL tuple expression for a collection of JpaPids, consisting of
 * ((partitionId,resId),(partitionId,resId),(partitionId,resId),...)
 * <p>
 * Note that {@code from(..)} adds the partition ID placeholder before the
 * resource ID placeholder for each pid, so each emitted tuple is ordered
 * (partitionId, resId).
 */
public class JpaPidValueTuples extends Expression {
// Flat list of SQL placeholder strings, consumed pairwise (partitionId, resId)
private final Collection<String> myValues;
public JpaPidValueTuples(Collection<String> theValues) {
myValues = theValues;
}
@Override
protected void collectSchemaObjects(ValidationContext vContext) {
// nothing
}
/**
 * Renders the tuple list. myValues must contain an even number of entries,
 * since two values are consumed per iteration.
 * <p>
 * NOTE(review): the opening "('" is guarded by hasParens() but the closing
 * "')" is appended unconditionally - confirm this asymmetry is intended.
 */
@Override
public void appendTo(AppendableExt app) throws IOException {
app.append('(');
String value;
for (Iterator<String> iter = myValues.iterator(); iter.hasNext(); ) {
if (hasParens()) {
app.append("('");
}
// first value of the pair (partition ID placeholder)
value = iter.next();
app.append(value);
app.append("','");
// second value of the pair (resource ID placeholder)
value = iter.next();
app.append(value);
app.append("')");
if (iter.hasNext()) {
app.append(',');
}
}
if (hasParens()) {
app.append(')');
}
}
public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, JpaPid[] thePids) {
return from(theSearchQueryBuilder, Arrays.asList(thePids));
}
/**
 * Builds the tuple expression from the given pids, generating a bind
 * placeholder for each partition ID and resource ID (in that order).
 */
public static JpaPidValueTuples from(SearchQueryBuilder theSearchQueryBuilder, Collection<JpaPid> thePids) {
List<String> placeholders = new ArrayList<>(thePids.size() * 2);
for (JpaPid next : thePids) {
placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getPartitionId()));
placeholders.add(theSearchQueryBuilder.generatePlaceholder(next.getId()));
}
return new JpaPidValueTuples(placeholders);
}
}

View File

@ -35,11 +35,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@ -282,9 +283,20 @@ public class SearchQueryBuilder {
/**
* Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a <code>_source</code> search parameter
*/
public SourcePredicateBuilder addSourcePredicateBuilder(
public ResourceHistoryProvenancePredicateBuilder addResourceHistoryProvenancePredicateBuilder(
@Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
SourcePredicateBuilder retVal = mySqlBuilderFactory.newSourcePredicateBuilder(this);
ResourceHistoryProvenancePredicateBuilder retVal =
mySqlBuilderFactory.newResourceHistoryProvenancePredicateBuilder(this);
addTable(retVal, theSourceJoinColumn, theJoinType);
return retVal;
}
/**
* Add and return a predicate builder (or a root query if no root query exists yet) for selecting on a <code>_source</code> search parameter
*/
public ResourceHistoryPredicateBuilder addResourceHistoryPredicateBuilder(
@Nullable DbColumn[] theSourceJoinColumn, SelectQuery.JoinType theJoinType) {
ResourceHistoryPredicateBuilder retVal = mySqlBuilderFactory.newResourceHistoryPredicateBuilder(this);
addTable(retVal, theSourceJoinColumn, theJoinType);
return retVal;
}
@ -823,9 +835,11 @@ public class SearchQueryBuilder {
return false;
}
public void addResourceIdsPredicate(List<Long> thePidList) {
public void addResourceIdsPredicate(List<JpaPid> thePidList) {
List<Long> pidList = thePidList.stream().map(JpaPid::getId).collect(Collectors.toList());
DbColumn resourceIdColumn = getOrCreateFirstPredicateBuilder().getResourceIdColumn();
InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(thePidList));
InCondition predicate = new InCondition(resourceIdColumn, generatePlaceholders(pidList));
addPredicate(predicate);
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.search.builder.sql;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@ -42,7 +43,7 @@ import java.util.Objects;
public class SearchQueryExecutor implements ISearchQueryExecutor {
private static final Long NO_MORE = -1L;
private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
private static final SearchQueryExecutor NO_VALUE_EXECUTOR = new SearchQueryExecutor();
private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];
private static final Logger ourLog = LoggerFactory.getLogger(SearchQueryExecutor.class);
@ -53,7 +54,7 @@ public class SearchQueryExecutor implements ISearchQueryExecutor {
private boolean myQueryInitialized;
private ScrollableResultsIterator<Object> myResultSet;
private Long myNext;
private JpaPid myNext;
/**
* Constructor
@ -86,10 +87,10 @@ public class SearchQueryExecutor implements ISearchQueryExecutor {
}
@Override
public Long next() {
public JpaPid next() {
fetchNext();
Validate.isTrue(hasNext(), "Can not call next() right now, no data remains");
Long next = myNext;
JpaPid next = myNext;
myNext = null;
return next;
}
@ -155,17 +156,17 @@ public class SearchQueryExecutor implements ISearchQueryExecutor {
}
}
private long getNextPid(ScrollableResultsIterator<Object> theResultSet) {
private JpaPid getNextPid(ScrollableResultsIterator<Object> theResultSet) {
Object nextRow = Objects.requireNonNull(theResultSet.next());
// We should typically get two columns back, the first is the partition ID and the second
// is the resource ID. But if we're doing a count query, we'll get a single column in an array
// or maybe even just a single non array value depending on how the platform handles it.
if (nextRow instanceof Number) {
return ((Number) nextRow).longValue();
return JpaPid.fromId(((Number) nextRow).longValue());
} else {
Object[] nextRowAsArray = (Object[]) nextRow;
if (nextRowAsArray.length == 1) {
return (Long) nextRowAsArray[0];
return JpaPid.fromId((Long) nextRowAsArray[0]);
} else {
int i;
// TODO MB add a strategy object to GeneratedSql to describe the result set.
@ -181,9 +182,11 @@ public class SearchQueryExecutor implements ISearchQueryExecutor {
// - partition_id, res_id, coord-dist
// Assume res_id is first Long in row, and is in first two columns
if (nextRowAsArray[0] instanceof Long) {
return (long) nextRowAsArray[0];
return JpaPid.fromId((Long) nextRowAsArray[0]);
} else {
return (long) nextRowAsArray[1];
Integer partitionId = (Integer) nextRowAsArray[0];
Long pid = (Long) nextRowAsArray[1];
return JpaPid.fromId(pid, partitionId);
}
}
}

View File

@ -27,11 +27,12 @@ import ca.uhn.fhir.jpa.search.builder.predicate.DatePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.NumberPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityNormalizedPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.QuantityPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceHistoryProvenancePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceIdPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceLinkPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SearchParamPresentPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.SourcePredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TagPredicateBuilder;
import ca.uhn.fhir.jpa.search.builder.predicate.TokenPredicateBuilder;
@ -109,8 +110,13 @@ public class SqlObjectFactory {
return myApplicationContext.getBean(TagPredicateBuilder.class, theSearchSqlBuilder);
}
public SourcePredicateBuilder newSourcePredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
return myApplicationContext.getBean(SourcePredicateBuilder.class, theSearchSqlBuilder);
public ResourceHistoryPredicateBuilder newResourceHistoryPredicateBuilder(SearchQueryBuilder theSearchSqlBuilder) {
return myApplicationContext.getBean(ResourceHistoryPredicateBuilder.class, theSearchSqlBuilder);
}
public ResourceHistoryProvenancePredicateBuilder newResourceHistoryProvenancePredicateBuilder(
SearchQueryBuilder theSearchSqlBuilder) {
return myApplicationContext.getBean(ResourceHistoryProvenancePredicateBuilder.class, theSearchSqlBuilder);
}
public SearchQueryExecutor newSearchQueryExecutor(GeneratedSql theGeneratedSql, Integer theMaxResultsToFetch) {

View File

@ -68,7 +68,7 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());
return JpaPid.fromLongList(retVal);
return ISearchResultDao.toJpaPidList(retVal);
});
}
@ -81,7 +81,7 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
.execute(() -> {
List<Long> retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
return JpaPid.fromLongList(retVal);
return ISearchResultDao.toJpaPidList(retVal);
});
}

View File

@ -177,7 +177,7 @@ public class InstanceReindexServiceImpl implements IInstanceReindexService {
List<String> messages = new ArrayList<>();
JpaPid pid = JpaPid.fromId(entity.getId());
JpaPid pid = entity.getPersistentId();
ReindexOutcome outcome = dao.reindex(pid, new ReindexParameters(), theRequestDetails, new TransactionDetails());
messages.add("Reindex completed in " + sw);

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IBaseResource;
@ -61,16 +60,10 @@ public class ResourceReindexer {
myFhirContext = theFhirContext;
}
public void readAndReindexResourceByPid(Long theResourcePid) {
ResourceTable resourceTable =
myResourceTableDao.findById(theResourcePid).orElseThrow(IllegalStateException::new);
reindexResourceEntity(resourceTable);
}
public void reindexResourceEntity(ResourceTable theResourceTable) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceTable.getResourceType());
long expectedVersion = theResourceTable.getVersion();
IBaseResource resource = dao.readByPid(JpaPid.fromId(theResourceTable.getId()), true);
IBaseResource resource = dao.readByPid(theResourceTable.getPersistentId(), true);
if (resource == null) {
throw new InternalErrorException(Msg.code(1171) + "Could not find resource version "

View File

@ -23,11 +23,11 @@ import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
@ -79,9 +79,9 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
* @see ca.uhn.fhir.jpa.reindex.job.ReindexJobConfig
* @deprecated Use the Batch2 {@link ca.uhn.fhir.batch2.api.IJobCoordinator#startInstance(JobInstanceStartRequest)} instead.
*/
@SuppressWarnings({"removal", "DeprecatedIsStillUsed"})
@Deprecated
public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasScheduledJobs {
@ -107,9 +107,6 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private FhirContext myContext;
@ -261,10 +258,10 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
private int runReindexJobs() {
Collection<ResourceReindexJobEntity> jobs = getResourceReindexJobEntities();
if (jobs.size() > 0) {
if (!jobs.isEmpty()) {
ourLog.info("Running {} reindex jobs: {}", jobs.size(), jobs);
} else {
ourLog.debug("Running {} reindex jobs: {}", jobs.size(), jobs);
ourLog.debug("Running 0 reindex jobs");
return 0;
}
@ -356,7 +353,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
// Submit each resource requiring reindexing
List<Future<Date>> futures = range.stream()
.map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter)))
.map(t -> myTaskExecutor.submit(new ResourceReindexingTask(JpaPid.fromId(t), counter)))
.collect(Collectors.toList());
Date latestDate = null;
@ -429,62 +426,64 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
});
}
private void markResourceAsIndexingFailed(final long theId) {
private void markResourceAsIndexingFailed(final JpaPid theId) {
TransactionTemplate txTemplate = new TransactionTemplate(myTxManager);
txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
txTemplate.execute((TransactionCallback<Void>) theStatus -> {
ourLog.info("Marking resource with PID {} as indexing_failed", theId);
myResourceTableDao.updateIndexStatus(theId, BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED);
myResourceTableDao.updateIndexStatus(theId.getId(), BaseHapiFhirDao.INDEX_STATUS_INDEXING_FAILED);
Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResourceId = :id");
q.setParameter("id", theId);
Query q = myEntityManager.createQuery("DELETE FROM ResourceTag t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
q.executeUpdate();
q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamCoords t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamDate t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamNumber t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamQuantity t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamQuantityNormalized t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
"DELETE FROM ResourceIndexedSearchParamString t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery("DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResourcePid = :id");
q.setParameter("id", theId);
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamToken t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResourcePid = :id");
q.setParameter("id", theId);
q = myEntityManager.createQuery(
"DELETE FROM ResourceIndexedSearchParamUri t WHERE t.myResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResourcePid = :id");
q.setParameter("id", theId);
q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.mySourceResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
q = myEntityManager.createQuery("DELETE FROM ResourceLink t WHERE t.myTargetResource.myId = :id");
q.setParameter("id", theId.getId());
q.executeUpdate();
return null;
@ -492,11 +491,11 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
}
private class ResourceReindexingTask implements Callable<Date> {
private final Long myNextId;
private final JpaPid myNextId;
private final AtomicInteger myCounter;
private Date myUpdated;
ResourceReindexingTask(Long theNextId, AtomicInteger theCounter) {
ResourceReindexingTask(JpaPid theNextId, AtomicInteger theCounter) {
myNextId = theNextId;
myCounter = theCounter;
}
@ -534,7 +533,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc, IHasSc
Throwable reindexFailure;
reindexFailure = myTxTemplate.execute(t -> {
ResourceTable resourceTable =
myResourceTableDao.findById(myNextId).orElseThrow(IllegalStateException::new);
myResourceTableDao.findById(myNextId.getId()).orElseThrow(IllegalStateException::new);
myUpdated = resourceTable.getUpdatedDate();
try {

View File

@ -19,6 +19,8 @@
*/
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -33,9 +35,9 @@ import java.util.List;
*/
public class InClauseNormalizer {
public static List<Long> normalizeIdListForInClause(List<Long> theResourceIds) {
public static List<JpaPid> normalizeIdListForInClause(List<JpaPid> theResourceIds) {
List<Long> retVal = theResourceIds;
List<JpaPid> retVal = theResourceIds;
int listSize = theResourceIds.size();
@ -56,8 +58,8 @@ public class InClauseNormalizer {
return retVal;
}
private static List<Long> padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
List<Long> retVal = theIdList;
private static List<JpaPid> padIdListWithPlaceholders(List<JpaPid> theIdList, int preferredListSize) {
List<JpaPid> retVal = theIdList;
if (isUnmodifiableList(theIdList)) {
retVal = new ArrayList<>(preferredListSize);
@ -65,13 +67,13 @@ public class InClauseNormalizer {
}
while (retVal.size() < preferredListSize) {
retVal.add(-1L);
retVal.add(JpaPid.fromId(-1L, null));
}
return retVal;
}
private static boolean isUnmodifiableList(List<Long> theList) {
private static boolean isUnmodifiableList(List<JpaPid> theList) {
try {
theList.addAll(Collections.emptyList());
} catch (Exception e) {

View File

@ -26,6 +26,8 @@ import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.sql.ColumnTupleObject;
import ca.uhn.fhir.jpa.search.builder.sql.JpaPidValueTuples;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.primitive.InstantDt;
@ -118,6 +120,12 @@ public class QueryParameterUtils {
return toAndPredicate(Arrays.asList(theAndPredicates));
}
@Nonnull
public static Condition toInPredicate(
ColumnTupleObject theColumns, JpaPidValueTuples theValues, boolean theInverse) {
return new InCondition(theColumns, theValues).setNegate(theInverse);
}
@Nonnull
public static Condition toEqualToOrInPredicate(
DbColumn theColumn, List<String> theValuePlaceholders, boolean theInverse) {

View File

@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.util.FhirContextSearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.junit.jupiter.api.BeforeEach;
@ -45,7 +46,7 @@ class SearchBuilderTest {
@Test
void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Null() {
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, false);
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, false).myHashIdentityValues;
// There are only 12 resource types that actually can be linked to by the QuestionnaireResponse
// resource via canonical references in any parameters
assertThat(types).hasSize(1);
@ -54,14 +55,14 @@ class SearchBuilderTest {
@Test
void testCalculateIndexUriIdentityHashesForResourceTypes_Include_Nonnull() {
Set<String> inputTypes = Set.of("Questionnaire");
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(inputTypes, false);
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), inputTypes, false).myHashIdentityValues;
// Just the one that we actually specified
assertThat(types).hasSize(1);
}
@Test
void testCalculateIndexUriIdentityHashesForResourceTypes_RevInclude_Null() {
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(null, true);
Set<Long> types = mySearchBuilder.calculateIndexUriIdentityHashesForResourceTypes(new SystemRequestDetails(), null, true).myHashIdentityValues;
// Revincludes are really hard to figure out the potential resource types for, so we just need to
// use all active resource types
assertThat(types).hasSize(146);

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
@ -14,7 +15,7 @@ class SearchQueryExecutorsTest {
@Test
public void adaptFromLongArrayYieldsAllValues() {
List<Long> listWithValues = Arrays.asList(1L,2L,3L,4L,5L);
List<JpaPid> listWithValues = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L));
ISearchQueryExecutor queryExecutor = SearchQueryExecutors.from(listWithValues);
@ -24,7 +25,7 @@ class SearchQueryExecutorsTest {
@Test
public void limitedCountDropsTrailingTest() {
// given
List<Long> vals = Arrays.asList(1L,2L,3L,4L,5L);
List<JpaPid> vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L,4L,5L));
ISearchQueryExecutor target = SearchQueryExecutors.from(vals);
ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 3);
@ -35,7 +36,7 @@ class SearchQueryExecutorsTest {
@Test
public void limitedCountExhaustsBeforeLimitOkTest() {
// given
List<Long> vals = Arrays.asList(1L,2L,3L);
List<JpaPid> vals = JpaPid.fromLongList(Arrays.asList(1L,2L,3L));
ISearchQueryExecutor target = SearchQueryExecutors.from(vals);
ISearchQueryExecutor queryExecutor = SearchQueryExecutors.limited(target, 5);
@ -46,6 +47,7 @@ class SearchQueryExecutorsTest {
private List<Long> drain(ISearchQueryExecutor theQueryExecutor) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(theQueryExecutor, 0), false)
.map(JpaPid::getId)
.collect(Collectors.toList());
}

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.util;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.util.InClauseNormalizer;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@ -14,16 +15,16 @@ import static java.util.Collections.unmodifiableList;
import static org.assertj.core.api.Assertions.assertThat;
public class InClauseNormalizerTest {
private static final Long ourResourceId = 1L;
private static final Long ourPaddingValue = -1L;
private static final JpaPid ourResourceId = JpaPid.fromId(1L);
private static final JpaPid ourPaddingValue = JpaPid.fromId(-1L);
@ParameterizedTest
@MethodSource("arguments")
public void testNormalizeUnmodifiableList_willCreateNewListAndPadToSize(int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));
List<JpaPid> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));
initialList = unmodifiableList(initialList);
List<Long> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);
List<JpaPid> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);
assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize);
}
@ -31,23 +32,23 @@ public class InClauseNormalizerTest {
@ParameterizedTest
@MethodSource("arguments")
public void testNormalizeListToSizeAndPad(int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));
List<JpaPid> initialList = new ArrayList<>(nCopies(theInitialListSize, ourResourceId));
List<Long> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);
List<JpaPid> normalizedList = InClauseNormalizer.normalizeIdListForInClause(initialList);
assertNormalizedList(initialList, normalizedList, theInitialListSize, theExpectedNormalizedListSize);
}
private void assertNormalizedList(List<Long> theInitialList, List<Long> theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) {
List<Long> expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue));
private void assertNormalizedList(List<JpaPid> theInitialList, List<JpaPid> theNormalizedList, int theInitialListSize, int theExpectedNormalizedListSize) {
List<JpaPid> expectedPaddedSubList = new ArrayList<>(nCopies(theExpectedNormalizedListSize - theInitialListSize, ourPaddingValue));
assertThat(theNormalizedList).startsWith(listToArray(theInitialList));
assertThat(theNormalizedList).hasSize(theExpectedNormalizedListSize);
assertThat(theNormalizedList).endsWith(listToArray(expectedPaddedSubList));
}
static Long[] listToArray(List<Long> theList) {
return theList.toArray(new Long[0]);
static JpaPid[] listToArray(List<JpaPid> theList) {
return theList.toArray(new JpaPid[0]);
}
private static Stream<Arguments> arguments(){

View File

@ -37,7 +37,7 @@ import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@ -77,7 +77,16 @@ public class MdmCandidateSearchSvc {
@Transactional
public Collection<IAnyResource> findCandidates(
String theResourceType, IAnyResource theResource, RequestPartitionId theRequestPartitionId) {
Map<IResourcePersistentId, IAnyResource> matchedPidsToResources = new HashMap<>();
/*
* This is a LinkedHashMap only because a number of Smile MDM unit tests depend on
* the order of candidates being returned in an order consistent with the order they
* were created. Before we added the partition ID to the hashCode() of JpaPid this
* seemed to happen naturally by complete coincidence, but after that change it
* stopped happening. So now a linked hashmap is used instead.
*/
Map<IResourcePersistentId, IAnyResource> matchedPidsToResources = new LinkedHashMap<>();
List<MdmFilterSearchParamJson> filterSearchParams =
myMdmSettings.getMdmRules().getCandidateFilterSearchParams();
List<String> filterCriteria = buildFilterQuery(filterSearchParams, theResourceType);

View File

@ -21,23 +21,37 @@ package ca.uhn.fhir.jpa.model.dao;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
import jakarta.annotation.Nonnull;
import org.apache.commons.collections4.ComparatorUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
/**
* JPA implementation of IResourcePersistentId. JPA uses a Long as the primary key. This class should be used in any
* context where the pid is known to be a Long.
*/
public class JpaPid extends BaseResourcePersistentId<Long> {
public class JpaPid extends BaseResourcePersistentId<Long> implements Comparable<JpaPid> {
private final Long myId;
private PartitionablePartitionId myPartitionablePartitionId;
private static final Comparator<JpaPid> COMPARATOR;
static {
Comparator<JpaPid> partitionComparator =
Comparator.comparing(t -> defaultIfNull(t.getPartitionId(), Integer.MIN_VALUE));
Comparator<JpaPid> idComparator = Comparator.comparing(t -> t.myId);
COMPARATOR = ComparatorUtils.chainedComparator(List.of(partitionComparator, idComparator));
}
private JpaPid(Long theId) {
super(null);
myId = theId;
@ -67,6 +81,13 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
return this;
}
public void setPartitionId(Integer thePartitionId) {
if (myPartitionablePartitionId == null) {
myPartitionablePartitionId = new PartitionablePartitionId();
}
myPartitionablePartitionId.setPartitionId(thePartitionId);
}
public static List<Long> toLongList(JpaPid[] thePids) {
return toLongList(Arrays.asList(thePids));
}
@ -99,6 +120,12 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
return new JpaPid(theId);
}
public static JpaPid fromId(Long theId, Integer thePartitionId) {
JpaPid retVal = new JpaPid(theId);
retVal.setPartitionablePartitionId(PartitionablePartitionId.with(thePartitionId, null));
return retVal;
}
public static JpaPid fromIdAndVersion(Long theId, Long theVersion) {
return new JpaPid(theId, theVersion);
}
@ -115,14 +142,13 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
public boolean equals(Object theO) {
if (this == theO) return true;
if (theO == null || getClass() != theO.getClass()) return false;
if (!super.equals(theO)) return false;
JpaPid jpaPid = (JpaPid) theO;
return myId.equals(jpaPid.myId);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), myId);
return Objects.hash(myId);
}
@Override
@ -135,9 +161,15 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
return myId.toString();
}
@Override
public int compareTo(@Nonnull JpaPid theOther) {
return COMPARATOR.compare(this, theOther);
}
public Integer getPartitionId() {
// wipmb should we return null instead?
assert getPartitionablePartitionId() != null;
if (getPartitionablePartitionId() == null) {
return null;
}
return getPartitionablePartitionId().getPartitionId();
}
}

View File

@ -36,6 +36,16 @@ import org.apache.commons.lang3.builder.ToStringStyle;
import static ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable.SOURCE_URI_LENGTH;
/**
* This entity is deprecated - It stores the source URI and Request ID
* fields so that they can be indexed and searched discretely. In
* HAPI FHIR 6.8.0 we added equivalent columns to {@link ResourceHistoryTable}
* and started populating both those columns and the ones in this table.
* As of HAPI FHIR 8.0.0 we are no longer using this table unless
* the "AccessMetaSourceInformationFromProvenanceTable" on JpaStorageSettings
* is enabled (it's disabled by default). In the future we will remove
* this table entirely.
*/
@Table(
name = "HFJ_RES_VER_PROV",
indexes = {

View File

@ -37,7 +37,6 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.Lob;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;
import jakarta.persistence.OneToOne;
import jakarta.persistence.Table;
import jakarta.persistence.Transient;
import jakarta.persistence.UniqueConstraint;
@ -119,10 +118,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@OptimisticLock(excluded = true)
private ResourceEncodingEnum myEncoding;
@OneToOne(
mappedBy = "myResourceHistoryTable",
cascade = {CascadeType.REMOVE})
private ResourceHistoryProvenanceEntity myProvenance;
// TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity
@Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true)
private String mySourceUri;
@ -180,10 +175,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
myResourceTextVc = theResourceTextVc;
}
public ResourceHistoryProvenanceEntity getProvenance() {
return myProvenance;
}
public void addTag(ResourceTag theTag) {
ResourceHistoryTag tag = new ResourceHistoryTag(this, theTag.getTag(), getPartitionId());
tag.setResourceType(theTag.getResourceType());

View File

@ -31,6 +31,8 @@ import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.persistence.UniqueConstraint;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.GenericGenerator;
import java.io.Serializable;
@ -121,4 +123,17 @@ public class ResourceHistoryTag extends BaseTag implements Serializable {
public Long getId() {
return myId;
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
b.append("id", getId());
if (getPartitionId() != null) {
b.append("partId", getPartitionId().getPartitionId());
}
b.append("versionId", myResourceHistoryPid);
b.append("resId", getResourceId());
b.append("tag", getTag().getId());
return b.build();
}
}

View File

@ -3557,6 +3557,7 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
myCaptureQueriesListener.clear();
myObservationDao.update(obs);
myCaptureQueriesListener.logSelectQueries();
assertEquals(10, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countUpdateQueries());
assertEquals(1, myCaptureQueriesListener.countInsertQueries());

View File

@ -17,7 +17,6 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoTestUtils;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
@ -54,6 +53,7 @@ import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ClasspathUtil;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.hl7.fhir.dstu3.model.Age;
import org.hl7.fhir.dstu3.model.Attachment;
@ -110,6 +110,7 @@ import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import org.testcontainers.shaded.org.bouncycastle.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
@ -613,15 +614,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
.getSingleResult();
assertThat(historyCount).as("only create one history version").isEqualTo(1);
// make sure the search view works too
ResourceSearchView readBackView = myEntityManager
.createQuery("select v from ResourceSearchView v where v.myResourceId = :resId", ResourceSearchView.class)
.setParameter("resId", myMethodOutcome.getPersistentId().getId())
.getSingleResult();
assertThat(readBackView).as("found search view").isNotNull();
assertEquals(myExpectedId, readBackView.getFhirId(),
"fhir_id populated");
}
}

View File

@ -6,6 +6,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.interceptor.UserRequestRetryVersionConflictsInterceptor;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
@ -171,8 +172,8 @@ public class FhirResourceDaoR4ConcurrentCreateTest extends BaseJpaR4Test {
myResourceSearchUrlDao.saveAll(asList(entry1, entry2));
// when
myResourceSearchUrlSvc.deleteByResId(entry1.getResourcePid());
myResourceSearchUrlSvc.deleteByResId(nonExistentResourceId);
myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(entry1.getResourcePid()));
myResourceSearchUrlSvc.deleteByResId(JpaPid.fromId(nonExistentResourceId));
// then
List<Long> resourcesPids = getStoredResourceSearchUrlEntitiesPids();

View File

@ -73,12 +73,12 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
// Current version should be marked as deleted
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1);
assertNull(resourceTable.getDeleted());
assertNotNull(resourceTable.getPersistentId());
});
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2);
assertNotNull(resourceTable.getDeleted());
});
@ -215,7 +215,7 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
// Mark the current history version as not-deleted even though the actual resource
// table entry is marked deleted
runInTransaction(() -> {
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2);
resourceTable.setDeleted(null);
myResourceHistoryTableDao.save(resourceTable);
});

View File

@ -30,7 +30,7 @@ public class FhirResourceDaoR4InlineResourceModeTest extends BaseJpaR4Test {
relocateResourceTextToCompressedColumn(pid, 1L);
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(pid, 1);
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(pid, 1);
assertNotNull(historyEntity.getResource());
assertNull(historyEntity.getResourceTextVc());
assertEquals(ResourceEncodingEnum.JSONC, historyEntity.getEncoding());

View File

@ -18,6 +18,7 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
@ -25,6 +26,7 @@ import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import static ca.uhn.fhir.interceptor.api.Pointcut.STORAGE_PARTITION_IDENTIFY_ANY;
import static org.assertj.core.api.Assertions.assertThat;
@ -175,8 +177,10 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
}
@Test
public void testSearchByProfile_VersionedMode() {
@ParameterizedTest
@EnumSource(value = JpaStorageSettings.TagStorageModeEnum.class, names = {"NON_VERSIONED", "VERSIONED"})
public void testSearchByProfile_VersionedAndNonVersionedMode(JpaStorageSettings.TagStorageModeEnum theTagStorageModeEnum) {
myStorageSettings.setTagStorageMode(theTagStorageModeEnum);
// Put a tag in so we can search for it
String code = "http://" + UUID.randomUUID();
@ -185,24 +189,33 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
myMemoryCacheService.invalidateAllCaches();
logAllResourceTags();
logAllResourceHistoryTags();
// Search
myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Constants.PARAM_PROFILE, new TokenParam(code));
IBundleProvider outcome = myPatientDao.search(map, mySrd);
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.logSelectQueries().size());
// Query 1 - Find resources: Make sure we search for tag type+system+code always
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
// Query 2 - Load resourece contents
// Query 2 - Load resource contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
// Query 3 - Load tags and defintions
assertThat(sql).contains("where rht1_0.RES_ID in (?)");
// Query 3 - Load tags and definitions
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false);
if (theTagStorageModeEnum == JpaStorageSettings.TagStorageModeEnum.VERSIONED) {
assertThat(sql).contains("from HFJ_HISTORY_TAG rht1_0 join HFJ_TAG_DEF");
} else {
assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF");
}
assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);
List<String> profileDeclarations = outcome.getResources(0, 1).get(0).getMeta().getProfile().stream().map(t -> t.getValueAsString()).collect(Collectors.toList());
assertThat(profileDeclarations).containsExactly(code);
}
@Test
@ -234,7 +247,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
// Query 2 - Load resourece contents
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
assertThat(sql).contains("where rht1_0.RES_ID in (?)");
assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);

View File

@ -302,7 +302,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
table.setDeleted(new Date());
table = myResourceTableDao.saveAndFlush(table);
ResourceHistoryTable newHistory = table.toHistory(true);
ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(table.getId(), 1L);
ResourceHistoryTable currentHistory = myResourceHistoryTableDao.findForIdAndVersion(table.getId(), 1L);
newHistory.setEncoding(currentHistory.getEncoding());
newHistory.setResourceTextVc(currentHistory.getResourceTextVc());
myResourceHistoryTableDao.save(newHistory);
@ -2934,7 +2934,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
tx.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus theStatus) {
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
ResourceHistoryTable table = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
String newContent = myFhirContext.newJsonParser().encodeResourceToString(p);
newContent = newContent.replace("male", "foo");
table.setResourceTextVc(newContent);

View File

@ -1,9 +1,5 @@
package ca.uhn.fhir.jpa.dao.r4;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertNull;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@ -15,6 +11,8 @@ import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.BundleBuilder;
import jakarta.annotation.Nonnull;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BooleanType;
@ -31,7 +29,6 @@ import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Task;
import jakarta.annotation.Nonnull;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
@ -40,7 +37,6 @@ import org.junit.jupiter.api.Test;
import org.junit.platform.commons.annotation.Testable;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
@ -52,7 +48,10 @@ import java.util.stream.Collectors;
import static ca.uhn.fhir.util.HapiExtensions.EXTENSION_AUTO_VERSION_REFERENCES_AT_PATH;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@ -70,8 +69,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
@Nested
public class AutoVersionReferencesWithSettingAndExtension extends AutoVersionReferencesWithExtension {
@Override
@BeforeEach
public void before() {
super.before();
beforeAutoVersionReferencesWithSetting();
}
}
@ -219,7 +220,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
@Test
public void testCreateAndUpdateVersionedReferencesInTransaction_VersionedReferenceToVersionedReferenceToUpsertWithChange() {
AtomicInteger counter = new AtomicInteger();
final AtomicInteger counter = new AtomicInteger();
Supplier<Bundle> supplier = () -> {
BundleBuilder bb = new BundleBuilder(myFhirContext);
@ -229,12 +230,12 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
organization.setActive(true);
bb.addTransactionUpdateEntry(organization);
Patient patient = new Patient();
patient.getMeta().setExtension(patientAutoVersionExtension);
patient.setId("Patient/A");
patient.setManagingOrganization(new Reference("Organization/O"));
patient.setActive(true);
bb.addTransactionUpdateEntry(patient);
Patient patient1 = new Patient();
patient1.getMeta().setExtension(patientAutoVersionExtension);
patient1.setId("Patient/A");
patient1.setManagingOrganization(new Reference("Organization/O"));
patient1.setActive(true);
bb.addTransactionUpdateEntry(patient1);
ExplanationOfBenefit eob = new ExplanationOfBenefit();
eob.getMeta().setExtension(explanationOfBenefitAutoVersionExtension);
@ -274,7 +275,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
public void testInsertVersionedReferenceAtPath() {
Patient p = new Patient();
p.setActive(true);
IIdType patientId = myPatientDao.create(p).getId().toUnqualified();
IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualified();
assertEquals("1", patientId.getVersionIdPart());
assertNull(patientId.getBaseUrl());
String patientIdString = patientId.getValue();
@ -283,10 +284,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Observation observation = new Observation();
observation.getMeta().setExtension(observationAutoVersionExtension);
observation.getSubject().setReference(patientId.toVersionless().getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientIdString, observation.getSubject().getReference());
myCaptureQueriesListener.clear();
@ -297,13 +298,13 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
observation.setId(observationId);
observation.addIdentifier().setSystem("http://foo").setValue("bar");
observation.getSubject().setReference(patientId.toVersionless().getValue());
myObservationDao.update(observation);
myObservationDao.update(observation, mySrd);
// Make sure we're not introducing any extra DB operations
assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(5);
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientIdString, observation.getSubject().getReference());
}
@ -338,7 +339,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertTrue(observationId.hasVersionIdPart());
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.getValue(), observation.getSubject().getReference());
assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
}
@ -354,11 +355,11 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Encounter encounter = new Encounter();
encounter.setId(IdType.newRandomUuid());
encounter.addIdentifier().setSystem("http://baz").setValue("baz");
myEncounterDao.create(encounter);
myEncounterDao.create(encounter, mySrd);
}
// Verify Patient Version
assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false")))
assertThat(myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false")), mySrd)
.getResources(0, 1).get(0).getIdElement().getVersionIdPart()).isEqualTo("2");
BundleBuilder builder = new BundleBuilder(myFhirContext);
@ -386,7 +387,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
IdType observationId = new IdType(outcome.getEntry().get(2).getResponse().getLocation());
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.getValue(), observation.getSubject().getReference());
assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart());
assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());
@ -402,11 +403,11 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Patient patient = new Patient();
patient.setId("PATIENT");
patient.setActive(true);
myPatientDao.update(patient).getId();
myPatientDao.update(patient, mySrd);
// Update patient to make a second version
patient.setActive(false);
myPatientDao.update(patient);
myPatientDao.update(patient, mySrd);
}
BundleBuilder builder = new BundleBuilder(myFhirContext);
@ -431,7 +432,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(2);
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.getValue(), observation.getSubject().getReference());
}
@ -466,7 +467,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertThat(myCaptureQueriesListener.logSelectQueries()).hasSize(3);
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.getValue(), observation.getSubject().getReference());
}
@ -479,16 +480,16 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// create patient ahead of time
Patient patient = new Patient();
patient.setId(patientId);
DaoMethodOutcome outcome = myPatientDao.update(patient);
DaoMethodOutcome outcome = myPatientDao.update(patient, mySrd);
assertEquals(patientId + "/_history/1", outcome.getResource().getIdElement().getValue());
Patient returned = myPatientDao.read(idType);
Patient returned = myPatientDao.read(idType, mySrd);
assertNotNull(returned);
assertEquals(patientId + "/_history/1", returned.getId());
// update to change version
patient.setActive(true);
myPatientDao.update(patient);
myPatientDao.update(patient, mySrd);
Observation obs = new Observation();
obs.getMeta().setExtension(observationAutoVersionExtension);
@ -505,7 +506,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertNotNull(returnedTr);
// some verification
Observation obRet = myObservationDao.read(obs.getIdElement());
Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd);
assertNotNull(obRet);
}
@ -529,9 +530,9 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertNotNull(returnedTr);
// some verification
Observation obRet = myObservationDao.read(obs.getIdElement());
Observation obRet = myObservationDao.read(obs.getIdElement(), mySrd);
assertNotNull(obRet);
Patient returned = myPatientDao.read(patientRef.getReferenceElement());
Patient returned = myPatientDao.read(patientRef.getReferenceElement(), mySrd);
assertNotNull(returned);
}
@ -554,7 +555,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
assertEquals("2", patient.getIdElement().getVersionIdPart());
// read back and verify that reference is versioned
messageHeader = myMessageHeaderDao.read(messageHeaderId);
messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
}
@ -599,8 +600,8 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
IdType messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
// read back and verify that reference is versioned and correct
Patient patient = myPatientDao.read(patientId);
MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId);
Patient patient = myPatientDao.read(patientId, mySrd);
MessageHeader messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
// create bundle second time
@ -609,8 +610,8 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
messageHeaderId = new IdType(outcome.getEntry().get(1).getResponse().getLocation());
// read back and verify that reference is versioned and correct
patient = myPatientDao.read(patientId);
messageHeader = myMessageHeaderDao.read(messageHeaderId);
patient = myPatientDao.read(patientId, mySrd);
messageHeader = myMessageHeaderDao.read(messageHeaderId, mySrd);
assertEquals(patient.getIdElement().getValue(), messageHeader.getFocus().get(0).getReference());
}
@ -637,11 +638,11 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Patient patient = new Patient();
patient.setId(thePatientId);
patient.setActive(true);
myPatientDao.create(patient).getId();
myPatientDao.create(patient, mySrd);
// update patient to make a second version
patient.setActive(false);
myPatientDao.update(patient);
myPatientDao.update(patient, mySrd);
return patient;
}
}
@ -652,17 +653,17 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Patient p = new Patient();
p.setActive(true);
IIdType patientId = myPatientDao.create(p).getId().toUnqualified();
IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualified();
assertEquals("1", patientId.getVersionIdPart());
assertNull(patientId.getBaseUrl());
String patientIdString = patientId.getValue();
Observation observation = new Observation();
observation.getSubject().setReference(patientIdString);
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Read back
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientIdString, observation.getSubject().getReference());
}
@ -672,21 +673,21 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Patient p = new Patient();
p.setActive(true);
myPatientDao.create(p);
myPatientDao.create(p, mySrd);
// Update the patient
p.setActive(false);
IIdType patientId = myPatientDao.update(p).getId().toUnqualified();
IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified();
assertEquals("2", patientId.getVersionIdPart());
assertNull(patientId.getBaseUrl());
Observation observation = new Observation();
observation.getSubject().setReference(patientId.withVersion("1").getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Read back
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.withVersion("1").getValue(), observation.getSubject().getReference());
}
@ -698,20 +699,22 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create the patient
Patient p = new Patient();
p.addIdentifier().setSystem("http://foo").setValue("1");
myPatientDao.create(p);
myPatientDao.create(p, mySrd);
// Update the patient
p.getIdentifier().get(0).setValue("2");
IIdType patientId = myPatientDao.update(p).getId().toUnqualified();
IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified();
assertEquals("2", patientId.getVersionIdPart());
Observation observation = new Observation();
observation.getSubject().setReference(patientId.withVersion("1").getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
logAllResourceLinks();
// Search - Non Synchronous for *
{
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL));
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd);
assertEquals(1, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 1);
assertThat(resources).hasSize(2);
@ -721,7 +724,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Search - Non Synchronous for named include
{
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT));
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd);
assertEquals(1, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 1);
assertThat(resources).hasSize(2);
@ -735,46 +738,63 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
public void testSearchAndIncludeVersionedReference_Synchronous() {
myFhirContext.getParserOptions().setStripVersionsFromReferences(false);
myStorageSettings.setRespectVersionsForSearchIncludes(true);
myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.VERSIONED);
// Create the patient
Patient p = new Patient();
p.getMeta().addTag("http://tag", "1", null);
p.addIdentifier().setSystem("http://foo").setValue("1");
myPatientDao.create(p);
myPatientDao.create(p, mySrd);
// Update the patient
// Update the patient - Add a second tag
p.getIdentifier().get(0).setValue("2");
IIdType patientId = myPatientDao.update(p).getId().toUnqualified();
p.getMeta().addTag("http://tag", "2", null);
IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified();
assertEquals("2", patientId.getVersionIdPart());
Observation observation = new Observation();
observation.getSubject().setReference(patientId.withVersion("1").getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Search - Non Synchronous for *
logAllResourceVersions();
logAllResourceHistoryTags();
// Search - Non-Synchronous for *
{
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL));
myCaptureQueriesListener.clear();
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd);
assertEquals(2, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());
assertThat(resources).hasSize(2);
assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue());
IBaseResource patient = resources.get(1);
assertEquals(patientId.withVersion("1").getValue(), patient.getIdElement().getValue());
assertThat(getTagCodes(patient)).asList().containsExactly("1");
ourLog.info("Patient: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient));
}
// Search - Non-Synchronous for named include
{
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd);
assertEquals(2, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources).hasSize(2);
assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue());
assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue());
assertThat(getTagCodes(resources.get(1))).asList().containsExactly("1");
}
// Search - Non Synchronous for named include
{
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT));
assertEquals(2, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources).hasSize(2);
assertEquals(observationId.getValue(), resources.get(0).getIdElement().getValue());
assertEquals(patientId.withVersion("1").getValue(), resources.get(1).getIdElement().getValue());
}
@Nonnull
private static List<String> getTagCodes(IBaseResource patient) {
return patient.getMeta().getTag().stream().map(IBaseCoding::getCode).collect(Collectors.toList());
}
@Test
public void testSearchAndIncludeVersionedReference_WhenOnlyOneVersionExists() {
HashSet<String> refPaths = new HashSet<String>();
HashSet<String> refPaths = new HashSet<>();
refPaths.add("Task.basedOn");
myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths);
myStorageSettings.setRespectVersionsForSearchIncludes(true);
@ -782,15 +802,15 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create a Condition
Condition condition = new Condition();
IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified();
IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified();
// Create a Task which is basedOn that Condition
Task task = new Task();
task.setBasedOn(Arrays.asList(new Reference(conditionId)));
IIdType taskId = myTaskDao.create(task).getId().toUnqualified();
task.setBasedOn(List.of(new Reference(conditionId)));
IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified();
// Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON));
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd);
assertEquals(2, outcome.size());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2);
@ -800,10 +820,10 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Now, update the Condition to generate another version of it
condition.setRecordedDate(new Date(System.currentTimeMillis()));
String conditionIdString = myConditionDao.update(condition).getId().getValue();
myConditionDao.update(condition, mySrd.getId().getValue(), mySrd);
// Search for the Task again and make sure that we get the original version of the Condition resource in the Response
outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON));
outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd);
assertEquals(2, outcome.size());
resources = outcome.getResources(0, 2);
assertThat(resources).hasSize(2);
@ -814,7 +834,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
@Test
public void testSearchAndIncludeVersionedReference_WhenMultipleVersionsExist() {
HashSet<String> refPaths = new HashSet<String>();
HashSet<String> refPaths = new HashSet<>();
refPaths.add("Task.basedOn");
myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths);
myStorageSettings.setRespectVersionsForSearchIncludes(true);
@ -822,23 +842,24 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create a Condition
Condition condition = new Condition();
IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified();
IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified();
ourLog.info("conditionId: {}", conditionId);
// Now, update the Condition 3 times to generate a 4th version of it
condition.setRecordedDate(new Date(System.currentTimeMillis()));
conditionId = myConditionDao.update(condition).getId();
myConditionDao.update(condition, mySrd);
condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000));
conditionId = myConditionDao.update(condition).getId();
myConditionDao.update(condition, mySrd);
condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000));
conditionId = myConditionDao.update(condition).getId();
conditionId = myConditionDao.update(condition, mySrd).getId().toUnqualified();
// Create a Task which is basedOn that Condition
Task task = new Task();
task.setBasedOn(Arrays.asList(new Reference(conditionId)));
IIdType taskId = myTaskDao.create(task).getId().toUnqualified();
task.setBasedOn(List.of(new Reference(conditionId)));
IIdType taskId = myTaskDao.create(task, mySrd).getId().toUnqualified();
// Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON));
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd);
assertEquals(2, outcome.size());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2);
@ -849,7 +870,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
@Test
public void testSearchAndIncludeVersionedReference_WhenPreviouslyReferencedVersionOne() {
HashSet<String> refPaths = new HashSet<String>();
HashSet<String> refPaths = new HashSet<>();
refPaths.add("Task.basedOn");
myFhirContext.getParserOptions().setDontStripVersionsFromReferencesAtPaths(refPaths);
myStorageSettings.setRespectVersionsForSearchIncludes(true);
@ -857,32 +878,32 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create a Condition
Condition condition = new Condition();
IIdType conditionId = myConditionDao.create(condition).getId().toUnqualified();
IIdType conditionId = myConditionDao.create(condition, mySrd).getId().toUnqualified();
ourLog.info("conditionId: \n{}", conditionId);
// Create a Task which is basedOn that Condition
Task task = new Task();
task.setBasedOn(Arrays.asList(new Reference(conditionId)));
IIdType taskId = myTaskDao.create(task).getId().toUnqualified();
task.setBasedOn(List.of(new Reference(conditionId)));
myTaskDao.create(task, mySrd).getId().toUnqualified();
// Now, update the Condition 3 times to generate a 4th version of it
condition.setRecordedDate(new Date(System.currentTimeMillis()));
conditionId = myConditionDao.update(condition).getId();
conditionId = myConditionDao.update(condition, mySrd).getId();
ourLog.info("UPDATED conditionId: \n{}", conditionId);
condition.setRecordedDate(new Date(System.currentTimeMillis() + 1000000));
conditionId = myConditionDao.update(condition).getId();
conditionId = myConditionDao.update(condition, mySrd).getId();
ourLog.info("UPDATED conditionId: \n{}", conditionId);
condition.setRecordedDate(new Date(System.currentTimeMillis() + 2000000));
conditionId = myConditionDao.update(condition).getId();
conditionId = myConditionDao.update(condition, mySrd).getId();
ourLog.info("UPDATED conditionId: \n{}", conditionId);
// Now, update the Task to refer to the latest version 4 of the Condition
task.setBasedOn(Arrays.asList(new Reference(conditionId)));
taskId = myTaskDao.update(task).getId();
task.setBasedOn(List.of(new Reference(conditionId)));
IIdType taskId = myTaskDao.update(task, mySrd).getId();
ourLog.info("UPDATED taskId: \n{}", taskId);
// Search for the Task using an _include=Task.basedOn and make sure we get the Condition resource in the Response
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON));
IBundleProvider outcome = myTaskDao.search(SearchParameterMap.newSynchronous().addInclude(Task.INCLUDE_BASED_ON), mySrd);
assertEquals(2, outcome.size());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources.size()).as(resources.stream().map(t -> t.getIdElement().toUnqualified().getValue()).collect(Collectors.joining(", "))).isEqualTo(2);
@ -899,20 +920,20 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create the patient
Patient p = new Patient();
p.addIdentifier().setSystem("http://foo").setValue("1");
myPatientDao.create(p);
myPatientDao.create(p, mySrd);
// Update the patient
p.getIdentifier().get(0).setValue("2");
IIdType patientId = myPatientDao.update(p).getId().toUnqualified();
IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified();
assertEquals("2", patientId.getVersionIdPart());
Observation observation = new Observation();
observation.getSubject().setReference(patientId.withVersion("1").getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Search - Non Synchronous for *
{
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL));
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL), mySrd);
assertEquals(1, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 1);
assertThat(resources).hasSize(2);
@ -922,7 +943,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Search - Non Synchronous for named include
{
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT));
IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(Observation.INCLUDE_PATIENT), mySrd);
assertEquals(1, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 1);
assertThat(resources).hasSize(2);
@ -940,24 +961,24 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Create the patient
Patient p = new Patient();
p.addIdentifier().setSystem("http://foo").setValue("1");
myPatientDao.create(p);
myPatientDao.create(p, mySrd);
// Update the patient
p.getIdentifier().get(0).setValue("2");
IIdType patientId = myPatientDao.update(p).getId().toUnqualified();
IIdType patientId = myPatientDao.update(p, mySrd).getId().toUnqualified();
assertEquals("2", patientId.getVersionIdPart());
Observation observation = new Observation();
observation.getSubject().setReference(patientId.withVersion("1").getValue());
IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
IIdType observationId = myObservationDao.create(observation, mySrd).getId().toUnqualified();
// Read the observation back
observation = myObservationDao.read(observationId);
observation = myObservationDao.read(observationId, mySrd);
assertEquals(patientId.toVersionless().getValue(), observation.getSubject().getReference());
// Search - Non Synchronous for *
{
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL));
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(IBaseResource.INCLUDE_ALL), mySrd);
assertEquals(2, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources).hasSize(2);
@ -967,7 +988,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Search - Non Synchronous for named include
{
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT));
IBundleProvider outcome = myObservationDao.search(SearchParameterMap.newSynchronous().addInclude(Observation.INCLUDE_PATIENT), mySrd);
assertEquals(2, outcome.sizeOrThrowNpe());
List<IBaseResource> resources = outcome.getResources(0, 2);
assertThat(resources).hasSize(2);
@ -977,7 +998,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
}
@Test
public void testNoNpeOnEoBBundle() {
public void testNoNpeOnEoBBundle() throws IOException {
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(true);
List<String> strings = Arrays.asList(
"ExplanationOfBenefit.patient",
@ -989,9 +1010,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
);
myStorageSettings.setAutoVersionReferenceAtPaths(new HashSet<>(strings));
Bundle bundle = myFhirContext.newJsonParser().parseResource(Bundle.class,
new InputStreamReader(
FhirResourceDaoR4VersionedReferenceTest.class.getResourceAsStream("/npe-causing-bundle.json")));
Bundle bundle = loadResourceFromClasspath(Bundle.class, "/npe-causing-bundle.json");
Bundle transaction = mySystemDao.transaction(new SystemRequestDetails(), bundle);
@ -1005,12 +1024,12 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
Observation obs = new Observation();
obs.setId("Observation/CDE");
obs.setSubject(new Reference("Patient/ABC"));
DaoMethodOutcome update = myObservationDao.create(obs);
DaoMethodOutcome update = myObservationDao.create(obs, mySrd);
Observation resource = (Observation)update.getResource();
String versionedPatientReference = resource.getSubject().getReference();
assertEquals("Patient/ABC", versionedPatientReference);
Patient p = myPatientDao.read(new IdDt("Patient/ABC"));
Patient p = myPatientDao.read(new IdDt("Patient/ABC"), mySrd);
assertNotNull(p);
myStorageSettings.setAutoVersionReferenceAtPaths("Observation.subject");
@ -1018,7 +1037,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
obs = new Observation();
obs.setId("Observation/DEF");
obs.setSubject(new Reference("Patient/RED"));
update = myObservationDao.create(obs);
update = myObservationDao.create(obs, mySrd);
resource = (Observation)update.getResource();
versionedPatientReference = resource.getSubject().getReference();
@ -1052,7 +1071,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
IdType idType = new IdType(bundle.getEntry().get(0)
.getResource().getId());
// the bundle above contains an observation, so we'll verify it was created here
Observation obs = myObservationDao.read(idType);
Observation obs = myObservationDao.read(idType, mySrd);
assertNotNull(obs);
}
}

View File

@ -616,7 +616,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
TransactionTemplate template = new TransactionTemplate(myTxManager);
template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
template.execute((TransactionCallback<ResourceTable>) t -> {
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
ResourceHistoryTable resourceHistoryTable = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), id.getVersionIdPartAsLong());
resourceHistoryTable.setEncoding(ResourceEncodingEnum.JSON);
resourceHistoryTable.setResourceTextVc("{\"resourceType\":\"FOO\"}");
myResourceHistoryTableDao.save(resourceHistoryTable);
@ -661,7 +661,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertEquals(1, myPatientDao.search(searchParamMap).size().intValue());
runInTransaction(() -> {
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 3);
ResourceHistoryTable historyEntry = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 3);
assertNotNull(historyEntry);
myResourceHistoryTableDao.delete(historyEntry);
});

View File

@ -429,7 +429,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate());
// HFJ_RES_VER
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L);
assertEquals(myPartitionId, version.getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate());
@ -439,11 +439,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(myPartitionId, historyTags.get(0).getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate());
// HFJ_RES_VER_PROV
assertNotNull(version.getProvenance());
assertEquals(myPartitionId, version.getProvenance().getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate());
// HFJ_SPIDX_STRING
List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * ")));
@ -517,7 +512,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertLocalDateFromDbMatches(myPartitionDate, tags.get(0).getPartitionId().getPartitionDate());
// HFJ_RES_VER
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, 1L);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(patientId, 1L);
assertNull(version.getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, version.getPartitionId().getPartitionDate());
@ -527,11 +522,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertNull(historyTags.get(0).getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate());
// HFJ_RES_VER_PROV
assertNotNull(version.getProvenance());
assertNull(version.getProvenance().getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate());
// HFJ_SPIDX_STRING
List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
String stringsDesc = strings.stream().map(ResourceIndexedSearchParamString::toString).sorted().collect(Collectors.joining("\n * "));
@ -778,7 +768,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// HFJ_RES_VER
int version = 2;
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version);
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version);
assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate());
@ -790,12 +780,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(myPartitionId, historyTags.get(1).getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(1).getPartitionId().getPartitionDate());
// HFJ_RES_VER_PROV
assertNotNull(resVer.getProvenance());
assertNotNull(resVer.getPartitionId());
assertEquals(myPartitionId, resVer.getProvenance().getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, resVer.getProvenance().getPartitionId().getPartitionDate());
// HFJ_SPIDX_STRING
List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * ")));
@ -856,7 +840,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// HFJ_RES_VER
int version = 2;
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version);
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(patientId, version);
assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, resVer.getPartitionId().getPartitionDate());

View File

@ -80,8 +80,10 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
myPartitionSettings.setDefaultPartitionId(ALTERNATE_DEFAULT_ID);
}
@Override
@AfterEach
public void after() {
public void after() throws Exception {
super.after();
myInterceptorRegistry.unregisterInterceptor(mySvc);
myInterceptorRegistry.unregisterInterceptor(myForceOffsetSearchModeInterceptor);
@ -171,7 +173,7 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
public void testCreateOrganization_ValidMembershipInCompartment() {
Organization org = new Organization();
org.setName("Foo");
Long id = myOrganizationDao.create(org).getId().getIdPartAsLong();
Long id = myOrganizationDao.create(org, mySrd).getId().getIdPartAsLong();
runInTransaction(() -> {
ResourceTable observation = myResourceTableDao.findById(id).orElseThrow(() -> new IllegalArgumentException());
@ -222,8 +224,9 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<SqlQuery> selectQueriesForCurrentThread = myCaptureQueriesListener.getSelectQueriesForCurrentThread();
assertEquals(3, selectQueriesForCurrentThread.size());
assertEquals(2, selectQueriesForCurrentThread.size());
assertThat(selectQueriesForCurrentThread.get(0).getSql(false, false)).contains("PARTITION_ID=?");
assertThat(selectQueriesForCurrentThread.get(1).getSql(false, false)).doesNotContain("PARTITION_ID=");
}

View File

@ -104,7 +104,7 @@ public class DiffProviderR4Test extends BaseResourceProviderR4Test {
createPatient(withId(id), withActiveTrue(), withFamily("SMITH"));
runInTransaction(() -> {
ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
ResourceHistoryTable version2 = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 2);
myResourceHistoryTableDao.deleteByPid(version2.getId());
});

View File

@ -35,7 +35,7 @@ public class ResourceProviderInvalidDataR4Test extends BaseResourceProviderR4Tes
// Manually set the value to be an invalid decimal number
runInTransaction(() -> {
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id, 1);
ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersion(id, 1);
String resourceText = resVer.getResourceTextVc();
resourceText = resourceText.replace("100", "-.100");
resVer.setResourceTextVc(resourceText);

View File

@ -3370,7 +3370,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
myResourceHistoryTableDao.delete(version);
}
});
@ -3395,7 +3395,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id1.getIdPartAsLong(), 1);
ResourceHistoryTable version = myResourceHistoryTableDao.findForIdAndVersion(id1.getIdPartAsLong(), 1);
myResourceHistoryTableDao.delete(version);
}
});
@ -4257,6 +4257,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
@Test
public void testSearchReturnsSearchDate() throws Exception {
Date before = new Date();
sleepAtLeast(10);
//@formatter:off
Bundle found = myClient
@ -4267,6 +4268,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
.execute();
//@formatter:on
sleepAtLeast(10);
Date after = new Date();
InstantType updated = found.getMeta().getLastUpdatedElement();
@ -6807,6 +6809,7 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
TestUtil.sleepAtLeast(delayInMs + 100);
patient.getNameFirstRep().addGiven("Bob");
myClient.update().resource(patient).execute();
TestUtil.sleepAtLeast(100);
Patient unrelatedPatient = (Patient) myClient.create().resource(new Patient()).execute().getResource();
assertThat(patientId).isNotEqualTo(unrelatedPatient.getIdElement().getIdPartAsLong());
@ -6832,7 +6835,9 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
// Issue 3138 test case, verify behavior of _at
verifyAtBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2);
verifyAtBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2);
myCaptureQueriesListener.clear();
verifyAtBehaviourWhenQueriedDateBeforeTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2);
myCaptureQueriesListener.logSelectQueries();
// verify behavior of _since
verifySinceBehaviourWhenQueriedDateDuringTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2);
verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(patientId, delayInMs, dateV1, dateV2);
@ -6854,8 +6859,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs);
assertTrue(timeBetweenUpdates.after(dateV1));
assertTrue(timeBetweenUpdates.after(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertThat(resultIds).hasSize(1);
String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates);
myCaptureQueriesListener.clear();
List<String> resultIds = searchAndReturnUnqualifiedIdValues(url);
assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1);
assertThat(resultIds).contains("Patient/" + patientId + "/_history/2");
}
@ -6863,8 +6870,10 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, -delayInMs);
assertTrue(timeBetweenUpdates.before(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates));
assertThat(resultIds).hasSize(2);
String url = myServerBase + "/Patient/" + patientId + "/_history?_at=gt" + toStr(timeBetweenUpdates);
myCaptureQueriesListener.clear();
List<String> resultIds = searchAndReturnUnqualifiedIdValues(url);
assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(2);
assertThat(resultIds).contains("Patient/" + patientId + "/_history/1");
assertThat(resultIds).contains("Patient/" + patientId + "/_history/2");
}
@ -6873,11 +6882,22 @@ public class ResourceProviderR4Test extends BaseResourceProviderR4Test {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV1, delayInMs / 2);
assertTrue(timeBetweenUpdates.after(dateV1));
assertTrue(timeBetweenUpdates.before(dateV2));
List<String> resultIds = searchAndReturnUnqualifiedIdValues(myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates));
assertThat(resultIds).hasSize(1);
String url = myServerBase + "/Patient/" + patientId + "/_history?_since=" + toStr(timeBetweenUpdates);
myCaptureQueriesListener.clear();
List<String> resultIds = searchAndReturnUnqualifiedIdValues(url);
assertThat(resultIds).as(()->describeVersionsAndUrl(url)).hasSize(1);
assertThat(resultIds).contains("Patient/" + patientId + "/_history/2");
}
/**
 * Builds a human-readable diagnostic string for assertion failure messages,
 * combining the request URL, every row currently in the resource history
 * table, and all captured SELECT statements (with bound parameters inlined).
 */
private String describeVersionsAndUrl(String theUrl) {
	return runInTransaction(() -> {
		String historyEntries = myResourceHistoryTableDao.findAll()
			.stream()
			.map(Object::toString)
			.collect(Collectors.joining("\n * "));
		String selectQueries = myCaptureQueriesListener.getSelectQueries()
			.stream()
			.map(query -> query.getSql(true, false))
			.collect(Collectors.joining("\n * "));
		return "URL: " + theUrl + "\n\nHistory Entries:\n * " + historyEntries
			+ "\n\nSQL Queries:\n * " + selectQueries;
	});
}
private void verifySinceBehaviourWhenQueriedDateAfterTwoUpdatedDates(Long patientId, int delayInMs, Date dateV1, Date dateV2) throws IOException {
Date timeBetweenUpdates = DateUtils.addMilliseconds(dateV2, delayInMs);
assertTrue(timeBetweenUpdates.after(dateV1));

View File

@ -12,6 +12,8 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.ReindexParameters;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
@ -22,7 +24,6 @@ import ca.uhn.fhir.jpa.test.PatientReindexTestHelper;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import jakarta.annotation.PostConstruct;
import jakarta.persistence.Query;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
@ -67,8 +68,10 @@ public class ReindexTaskTest extends BaseJpaR4Test {
@AfterEach
public void after() {
myInterceptorRegistry.unregisterAllAnonymousInterceptors();
myStorageSettings.setStoreMetaSourceInformation(new JpaStorageSettings().getStoreMetaSourceInformation());
myStorageSettings.setPreserveRequestIdInResourceBody(new JpaStorageSettings().isPreserveRequestIdInResourceBody());
JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setStoreMetaSourceInformation(defaults.getStoreMetaSourceInformation());
myStorageSettings.setPreserveRequestIdInResourceBody(defaults.isPreserveRequestIdInResourceBody());
myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable());
}
@Test
@ -171,7 +174,7 @@ public class ReindexTaskTest extends BaseJpaR4Test {
runInTransaction(()->{
assertEquals(20, myResourceHistoryTableDao.count());
for (ResourceHistoryTable history : myResourceHistoryTableDao.findAll()) {
assertNotNull(history.getResourceTextVc());
assertNotNull(history.getResourceTextVc(), ()->"Null history on: " + history);
assertNull(history.getResource());
}
});
@ -237,24 +240,34 @@ public class ReindexTaskTest extends BaseJpaR4Test {
public void testOptimizeStorage_AllVersions_CopyProvenanceEntityData() {
// Setup
myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID);
myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(true);
myStorageSettings.setPreserveRequestIdInResourceBody(true);
for (int i = 0; i < 10; i++) {
Patient p = new Patient();
p.setId("PATIENT" + i);
p.getMeta().setSource("http://foo#bar");
p.addIdentifier().setValue(String.valueOf(i));
myPatientDao.update(p, mySrd);
p.addIdentifier().setSystem("http://blah");
p.setActive(true);
myPatientDao.update(p, mySrd);
}
runInTransaction(()->{
List<ResourceHistoryTable> versions = myResourceHistoryTableDao.findAll();
for (var version : versions) {
ResourceHistoryProvenanceEntity provenance = new ResourceHistoryProvenanceEntity();
provenance.setResourceTable(version.getResourceTable());
provenance.setResourceHistoryTable(version);
provenance.setSourceUri("http://foo");
provenance.setRequestId("bar");
myResourceHistoryProvenanceDao.save(provenance);
}
});
runInTransaction(()->{
assertEquals(20, myResourceHistoryTableDao.count());
assertEquals(20, myResourceHistoryProvenanceDao.count());
Query query = myEntityManager.createQuery("UPDATE " + ResourceHistoryTable.class.getSimpleName() + " p SET p.mySourceUri = NULL, p.myRequestId = NULL");
assertEquals(20, query.executeUpdate());
});
runInTransaction(()-> {
@ -281,6 +294,7 @@ public class ReindexTaskTest extends BaseJpaR4Test {
// validate
runInTransaction(()-> {
assertEquals(0, myResourceHistoryProvenanceDao.count());
for (var next : myResourceHistoryProvenanceDao.findAll()) {
assertEquals("bar", next.getRequestId());
assertEquals("http://foo", next.getSourceUri());

View File

@ -360,7 +360,7 @@ public class GiantTransactionPerfTest {
}
@Override
public ResourceHistoryTable findForIdAndVersionAndFetchProvenance(long theId, long theVersion) {
public ResourceHistoryTable findForIdAndVersion(long theId, long theVersion) {
throw new UnsupportedOperationException();
}
@ -370,7 +370,7 @@ public class GiantTransactionPerfTest {
}
@Override
public Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, Long theId, Long theDontWantVersion) {
public Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(Pageable thePage, Long theId, Long theDontWantVersion) {
throw new UnsupportedOperationException();
}
@ -404,6 +404,11 @@ public class GiantTransactionPerfTest {
throw new UnsupportedOperationException();
}
@Override
public List<ResourceHistoryTable> findCurrentVersionsByResourcePidsAndFetchResourceTable(List<Long> theVersionlessPids) {
throw new UnsupportedOperationException();
}
@Nonnull
@Override
public List<ResourceHistoryTable> findAll() {

View File

@ -47,6 +47,18 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-tinder-test</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.jsqlparser</groupId>
<artifactId>jsqlparser</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

View File

@ -19,6 +19,7 @@ import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.TestDaoSearch;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
@ -140,7 +141,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR5Config.class})
@ContextConfiguration(classes = {TestR5Config.class, TestDaoSearch.Config.class})
public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder {
@Autowired
protected IJobCoordinator myJobCoordinator;
@ -419,12 +420,15 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
@AfterEach()
public void afterCleanupDao() {
myStorageSettings.setExpireSearchResults(new JpaStorageSettings().isExpireSearchResults());
myStorageSettings.setEnforceReferentialIntegrityOnDelete(new JpaStorageSettings().isEnforceReferentialIntegrityOnDelete());
myStorageSettings.setExpireSearchResultsAfterMillis(new JpaStorageSettings().getExpireSearchResultsAfterMillis());
myStorageSettings.setReuseCachedSearchResultsForMillis(new JpaStorageSettings().getReuseCachedSearchResultsForMillis());
myStorageSettings.setSuppressUpdatesWithNoChange(new JpaStorageSettings().isSuppressUpdatesWithNoChange());
myStorageSettings.setAllowContainsSearches(new JpaStorageSettings().isAllowContainsSearches());
JpaStorageSettings defaults = new JpaStorageSettings();
myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaults.isAccessMetaSourceInformationFromProvenanceTable());
myStorageSettings.setAllowContainsSearches(defaults.isAllowContainsSearches());
myStorageSettings.setEnforceReferentialIntegrityOnDelete(defaults.isEnforceReferentialIntegrityOnDelete());
myStorageSettings.setExpireSearchResults(defaults.isExpireSearchResults());
myStorageSettings.setExpireSearchResultsAfterMillis(defaults.getExpireSearchResultsAfterMillis());
myStorageSettings.setReuseCachedSearchResultsForMillis(defaults.getReuseCachedSearchResultsForMillis());
myStorageSettings.setSuppressUpdatesWithNoChange(defaults.isSuppressUpdatesWithNoChange());
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(defaults.isAutoCreatePlaceholderReferenceTargets());
myPagingProvider.setDefaultPageSize(BasePagingProvider.DEFAULT_DEFAULT_PAGE_SIZE);
myPagingProvider.setMaximumPageSize(BasePagingProvider.DEFAULT_MAX_PAGE_SIZE);

View File

@ -35,7 +35,7 @@ import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Propagation;
import jakarta.annotation.Nonnull;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
@ -98,28 +98,28 @@ public class CrossPartitionReferencesTest extends BaseJpaR5Test {
Patient p1 = new Patient();
p1.setActive(true);
IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless();
initializeCrossReferencesInterceptor();
logAllResources();
// Test
myCaptureQueriesListener.clear();
Patient p2 = new Patient();
p2.setActive(true);
p2.addLink().setOther(new Reference(patient1Id));
// Test
myCaptureQueriesListener.clear();
IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless();
// Verify
myCaptureQueriesListener.logSelectQueries();
assertEquals(1, myCaptureQueriesListener.countCommits());
assertEquals(0, myCaptureQueriesListener.countRollbacks());
myCaptureQueriesListener.clear();
SearchParameterMap params = SearchParameterMap
.newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue()))
.addInclude(Patient.INCLUDE_LINK);
IBundleProvider search = myPatientDao.search(params, mySrd);
assertThat(toUnqualifiedVersionlessIdValues(search)).containsExactly(patient2Id.getValue(), patient1Id.getValue());
List<String> values = toUnqualifiedVersionlessIdValues(search);
myCaptureQueriesListener.logSelectQueries();
assertThat(values).containsExactly(patient2Id.getValue(), patient1Id.getValue());
assertThat(search.getAllResources()).hasSize(2);
search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive()));
}
@ -190,7 +190,7 @@ public class CrossPartitionReferencesTest extends BaseJpaR5Test {
}
private void initializeCrossReferencesInterceptor() {
when(myCrossPartitionReferencesDetectedInterceptor.handle(any(),any())).thenAnswer(t->{
when(myCrossPartitionReferencesDetectedInterceptor.handle(any(), any())).thenAnswer(t -> {
CrossPartitionReferenceDetails theDetails = t.getArgument(1, CrossPartitionReferenceDetails.class);
IIdType targetId = theDetails.getPathAndRef().getRef().getReferenceElement();
RequestPartitionId referenceTargetPartition = myPartitionHelperSvc.determineReadPartitionForRequestForRead(theDetails.getRequestDetails(), targetId.getResourceType(), targetId);
@ -232,11 +232,12 @@ public class CrossPartitionReferencesTest extends BaseJpaR5Test {
private static RequestPartitionId selectPartition(String resourceType) {
switch (resourceType) {
case "Patient":
case "RelatedPerson":
return PARTITION_PATIENT;
case "Observation":
return PARTITION_OBSERVATION;
default:
throw new InternalErrorException("Don't know how to handle resource type");
throw new InternalErrorException("Don't know how to handle resource type: " + resourceType);
}
}

View File

@ -52,7 +52,7 @@ public class ExternallyStoredResourceR5Test extends BaseJpaR5Test {
runInTransaction(()->{
ResourceTable resource = myResourceTableDao.getReferenceById(id.getIdPartAsLong());
assertNotNull(resource);
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersion(id.getIdPartAsLong(), 1L);
assertNotNull(history);
assertEquals(ResourceEncodingEnum.ESR, history.getEncoding());
assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc());

View File

@ -17,6 +17,7 @@ import org.hl7.fhir.r5.model.Meta;
import org.hl7.fhir.r5.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import jakarta.annotation.Nonnull;
@ -298,33 +299,6 @@ public class FhirResourceDaoR5HistoryDisabledTest extends BaseJpaR5Test {
});
}
@Test
public void testUpdate_ProvenanceIsUpdatedInPlace() {
// Setup
myStorageSettings.setStoreMetaSourceInformation(JpaStorageSettings.StoreMetaSourceInformationEnum.SOURCE_URI_AND_REQUEST_ID);
Patient p = new Patient();
p.getMeta().setSource("source-1");
p.setActive(true);
when(mySrd.getRequestId()).thenReturn("request-id-1");
IIdType id1 = myPatientDao.create(p, mySrd).getId();
runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count()));
// Test
p = new Patient();
p.setId(id1);
p.addIdentifier().setValue("foo");
p.getMeta().setSource("source-2");
p.setActive(true);
when(mySrd.getRequestId()).thenReturn("request-id-2");
DaoMethodOutcome outcome = myPatientDao.update(p, mySrd);
// Verify
assertEquals("source-2#request-id-2", ((Patient) outcome.getResource()).getMeta().getSource());
p = myPatientDao.read(outcome.getId(), mySrd);
assertEquals("source-2#request-id-2", p.getMeta().getSource());
runInTransaction(()-> assertEquals(1, myResourceHistoryProvenanceDao.count()));
}
@Nonnull
private static List<String> toTagTokens(IBaseResource resource) {
List<String> tags = resource.getMeta()

View File

@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.dao.r5.BaseJpaR5Test;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor;
import org.junit.jupiter.api.AfterEach;
import org.springframework.beans.factory.annotation.Autowired;
import static org.junit.jupiter.api.Assertions.assertFalse;
/**
 * Base class for the database-partition-mode (DBPM) JPA R5 test variants.
 * Provides two named test partitions and a partition-selector interceptor
 * that subclasses register via {@link #registerPartitionInterceptorAndCreatePartitions()}.
 */
public class BaseDbpmJpaR5Test extends BaseJpaR5Test {
// Names and IDs of the two test partitions created by subclasses.
public static final String PARTITION_NAME_1 = "Partition_1";
public static final String PARTITION_NAME_2 = "Partition_2";
public static final int PARTITION_1 = 1;
public static final int PARTITION_2 = 2;
// Shared interceptor instance that routes reads/writes to a test partition.
protected final TestPartitionSelectorInterceptor myPartitionSelectorInterceptor = new TestPartitionSelectorInterceptor();
@Autowired
private IPartitionLookupSvc myPartitionConfigSvc;
@Override
@AfterEach
protected void afterResetInterceptors() {
super.afterResetInterceptors();
// Restore non-partitioned defaults and remove our interceptor so later tests are unaffected
myPartitionSettings.setPartitioningEnabled(false);
myInterceptorRegistry.unregisterInterceptor(myPartitionSelectorInterceptor);
}
// Registers the partition selector interceptor (asserting no other read-identify
// hook is already present) and creates the two test partitions.
protected void registerPartitionInterceptorAndCreatePartitions() {
assertFalse(myInterceptorRegistry.hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ), ()->myInterceptorRegistry.getAllRegisteredInterceptors().toString());
myInterceptorRegistry.registerInterceptor(myPartitionSelectorInterceptor);
myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_1).setName(PARTITION_NAME_1), null);
myPartitionConfigSvc.createPartition(new PartitionEntity().setId(PARTITION_2).setName(PARTITION_NAME_2), null);
// Load to pre-cache and avoid adding SQL queries
preFetchPartitionsIntoCache();
}
// Warms the partition lookup cache (by ID and by name) so that partition
// resolution does not add extra SELECTs to the queries being counted by tests.
protected void preFetchPartitionsIntoCache() {
if (myPartitionSettings.isPartitioningEnabled()) {
myPartitionConfigSvc.getPartitionById(PARTITION_1);
myPartitionConfigSvc.getPartitionById(PARTITION_2);
myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_1);
myPartitionConfigSvc.getPartitionByName(PARTITION_NAME_2);
}
}
}

View File

@ -0,0 +1,19 @@
package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
import ca.uhn.fhir.jpa.util.TestPartitionSelectorInterceptor;
import org.junit.jupiter.api.Nested;
/**
* This is a test verifying that we emit the right SQL for HAPI FHIR running in
* full legacy mode - No partitioning, no partition IDs in PKs.
*/
public class DbpmDisabledPartitioningDisabledTest extends BaseDbpmJpaR5Test {
// Runs the shared TestDefinitions suite in full legacy mode (per the class
// javadoc: no partitioning, no partition IDs in primary keys).
@Nested
public class MyTestDefinitions extends TestDefinitions {
MyTestDefinitions() {
// NOTE(review): the two boolean flags presumably mean partitioning-enabled
// and partition-IDs-in-PKs respectively — confirm against TestDefinitions.
super(DbpmDisabledPartitioningDisabledTest.this, new TestPartitionSelectorInterceptor(), false, false);
}
}
}

View File

@ -0,0 +1,32 @@
package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
/**
* This is a test verifying that we emit the right SQL when running in
* legacy partition mode with DEFAULT partition value of null (the default if
* not configured otherwise) - Partition IDs are in use, but they aren't
* included in primary keys or joins.
*/
public class DbpmDisabledPartitioningEnabledNullDefaultPartitionTest extends BaseDbpmJpaR5Test {
@Override
@BeforeEach
public void before() throws Exception {
super.before();
// Legacy partition mode: partition IDs are recorded, but the default
// partition ID is left null (the out-of-the-box configuration).
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setDefaultPartitionId(null);
registerPartitionInterceptorAndCreatePartitions();
}
// Runs the shared TestDefinitions suite with partitioning enabled but
// without partition IDs in primary keys (per the class javadoc).
@Nested
public class MyTestDefinitions extends TestDefinitions {
MyTestDefinitions() {
// NOTE(review): flags presumably mean partitioning-enabled=true,
// partition-IDs-in-PKs=false — confirm against TestDefinitions.
super(DbpmDisabledPartitioningEnabledNullDefaultPartitionTest.this, myPartitionSelectorInterceptor, true, false);
}
}
}

View File

@ -0,0 +1,31 @@
package ca.uhn.fhir.jpa.dao.r5.dbpartitionmode;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
/**
* This is a test verifying that we emit the right SQL when running in
* legacy partition mode - Partition IDs are in use, but they aren't
* included in primary keys or joins.
*/
public class DbpmDisabledPartitioningEnabledTest extends BaseDbpmJpaR5Test {
@Override
@BeforeEach
public void before() throws Exception {
super.before();
// Legacy partition mode with an explicit default partition ID of 0,
// unlike the sibling test which leaves the default partition ID null.
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setDefaultPartitionId(0);
registerPartitionInterceptorAndCreatePartitions();
}
// Runs the shared TestDefinitions suite with partitioning enabled but
// without partition IDs in primary keys (per the class javadoc).
@Nested
public class MyTestDefinitions extends TestDefinitions {
MyTestDefinitions() {
// NOTE(review): flags presumably mean partitioning-enabled=true,
// partition-IDs-in-PKs=false — confirm against TestDefinitions.
super(DbpmDisabledPartitioningEnabledTest.this, myPartitionSelectorInterceptor, true, false);
}
}
}

View File

@ -703,7 +703,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
protected void relocateResourceTextToCompressedColumn(Long theResourcePid, Long theVersion) {
runInTransaction(()->{
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(theResourcePid, theVersion);
ResourceHistoryTable historyEntity = myResourceHistoryTableDao.findForIdAndVersion(theResourcePid, theVersion);
byte[] contents = GZipUtil.compress(historyEntity.getResourceTextVc());
myResourceHistoryTableDao.updateNonInlinedContents(contents, historyEntity.getId());
});

View File

@ -40,6 +40,7 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.dao.data.INpmPackageVersionDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao;
@ -49,33 +50,42 @@ import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamStringDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamTokenDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
import ca.uhn.fhir.jpa.dao.data.IResourceSearchUrlDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.dao.mdm.MdmLinkDaoJpaImpl;
import ca.uhn.fhir.jpa.entity.MdmLink;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboStringUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceSearchUrlEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
@ -220,6 +230,8 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
protected ISearchResultCacheSvc mySearchResultCacheSvc;
@Autowired
protected PartitionSettings myPartitionSettings;
@Autowired
protected ITermCodeSystemDao myTermCodeSystemDao;
@Autowired
protected ITermCodeSystemVersionDao myTermCodeSystemVersionDao;
@ -256,6 +268,8 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
protected ITermConceptDao myTermConceptDao;
@Autowired
protected ITermConceptParentChildLinkDao myTermConceptParentChildLinkDao;
@Autowired
protected ITermValueSetConceptDao myTermValueSetConceptDao;
@Autowired
protected ITermValueSetDao myTermValueSetDao;
@ -277,6 +291,8 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
private IResourceTableDao myResourceTableDao;
@Autowired
private IResourceSearchUrlDao myResourceSearchUrlDao;
@Autowired
private IResourceTagDao myResourceTagDao;
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@ -285,6 +301,8 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
protected ITermDeferredStorageSvc myTermDeferredStorageSvc;
private final List<Object> myRegisteredInterceptors = new ArrayList<>(1);
@Autowired
private IResourceHistoryTagDao myResourceHistoryTagDao;
@SuppressWarnings("BusyWait")
public static void waitForSize(int theTarget, List<?> theList) {
@ -402,15 +420,22 @@ public abstract class BaseJpaTest extends BaseTest {
}
JpaStorageSettings defaultConfig = new JpaStorageSettings();
myStorageSettings.setAccessMetaSourceInformationFromProvenanceTable(defaultConfig.isAccessMetaSourceInformationFromProvenanceTable());
myStorageSettings.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing());
myStorageSettings.setAllowContainsSearches(defaultConfig.isAllowContainsSearches());
myStorageSettings.setDeleteEnabled(defaultConfig.isDeleteEnabled());
myStorageSettings.setIncludeHashIdentityForTokenSearches(defaultConfig.isIncludeHashIdentityForTokenSearches());
myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(defaultConfig.isMarkResourcesForReindexingUponSearchParameterChange());
myStorageSettings.setMaximumIncludesToLoadPerPage(defaultConfig.getMaximumIncludesToLoadPerPage());
myStorageSettings.setPreExpandValueSets(defaultConfig.isPreExpandValueSets());
myStorageSettings.getTreatBaseUrlsAsLocal().clear();
ParserOptions defaultParserOptions = new ParserOptions();
myFhirContext.getParserOptions().setStripVersionsFromReferences(defaultParserOptions.isStripVersionsFromReferences());
PartitionSettings defaultPartConfig = new PartitionSettings();
myPartitionSettings.setIncludePartitionInSearchHashes(defaultPartConfig.isIncludePartitionInSearchHashes());
myPartitionSettings.setAllowReferencesAcrossPartitions(defaultPartConfig.getAllowReferencesAcrossPartitions());
}
@AfterEach
@ -484,7 +509,7 @@ public abstract class BaseJpaTest extends BaseTest {
protected abstract PlatformTransactionManager getTxManager();
protected void logAllCodeSystemsAndVersionsCodeSystemsAndVersions() {
public void logAllCodeSystemsAndVersionsCodeSystemsAndVersions() {
runInTransaction(() -> {
ourLog.info("CodeSystems:\n * " + myTermCodeSystemDao.findAll()
.stream()
@ -519,13 +544,13 @@ public abstract class BaseJpaTest extends BaseTest {
});
}
protected void logAllResourceLinks() {
public void logAllResourceLinks() {
runInTransaction(() -> {
ourLog.info("Resource Links:\n * {}", myResourceLinkDao.findAll().stream().map(ResourceLink::toString).collect(Collectors.joining("\n * ")));
});
}
protected int logAllResources() {
public int logAllResources() {
return runInTransaction(() -> {
List<ResourceTable> resources = myResourceTableDao.findAll();
ourLog.info("Resources:\n * {}", resources.stream().map(ResourceTable::toString).collect(Collectors.joining("\n * ")));
@ -533,6 +558,14 @@ public abstract class BaseJpaTest extends BaseTest {
});
}
/**
 * Logs every {@link ResourceSearchUrlEntity} row currently stored, one per line,
 * and returns the number of rows found. Runs inside its own transaction.
 *
 * @return the number of search-URL entities in the table
 */
public int logAllResourceSearchUrls() {
	return runInTransaction(() -> {
		List<ResourceSearchUrlEntity> resources = myResourceSearchUrlDao.findAll();
		ourLog.info("Search URLs:\n * {}", resources.stream().map(ResourceSearchUrlEntity::toString).collect(Collectors.joining("\n * ")));
		return resources.size();
	});
}
protected int logAllConceptDesignations() {
return runInTransaction(() -> {
List<TermConceptDesignation> resources = myTermConceptDesignationDao.findAll();
@ -544,12 +577,12 @@ public abstract class BaseJpaTest extends BaseTest {
protected int logAllConceptProperties() {
return runInTransaction(() -> {
List<TermConceptProperty> resources = myTermConceptPropertyDao.findAll();
ourLog.info("Concept Designations:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * ")));
ourLog.info("Concept Properties:\n * {}", resources.stream().map(TermConceptProperty::toString).collect(Collectors.joining("\n * ")));
return resources.size();
});
}
protected int logAllConcepts() {
public int logAllConcepts() {
return runInTransaction(() -> {
List<TermConcept> resources = myTermConceptDao.findAll();
ourLog.info("Concepts:\n * {}", resources.stream().map(TermConcept::toString).collect(Collectors.joining("\n * ")));
@ -557,10 +590,18 @@ public abstract class BaseJpaTest extends BaseTest {
});
}
protected int logAllValueSetConcepts() {
/**
 * Logs every terminology concept parent/child link row, one per line, and
 * returns the number of rows found. Runs inside its own transaction.
 *
 * @return the number of {@link TermConceptParentChildLink} entities in the table
 */
protected int logAllConceptParentChildLinks() {
	return runInTransaction(() -> {
		List<TermConceptParentChildLink> resources = myTermConceptParentChildLinkDao.findAll();
		ourLog.info("Concept Parent/Child Links:\n * {}", resources.stream().map(TermConceptParentChildLink::toString).collect(Collectors.joining("\n * ")));
		return resources.size();
	});
}
public int logAllValueSetConcepts() {
return runInTransaction(() -> {
List<TermValueSetConcept> resources = myTermValueSetConceptDao.findAll();
ourLog.info("Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * ")));
ourLog.info("ValueSet Concepts:\n * {}", resources.stream().map(TermValueSetConcept::toString).collect(Collectors.joining("\n * ")));
return resources.size();
});
}
@ -591,12 +632,26 @@ public abstract class BaseJpaTest extends BaseTest {
});
}
protected void logAllTokenIndexes() {
protected void logAllTokenIndexes(String... theParamNames) {
String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : "";
runInTransaction(() -> {
ourLog.info("Token indexes:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(ResourceIndexedSearchParamToken::toString).collect(Collectors.joining("\n * ")));
String message = getAllTokenIndexes(theParamNames)
.stream()
.map(ResourceIndexedSearchParamToken::toString)
.collect(Collectors.joining("\n * "));
ourLog.info("Token indexes{}:\n * {}", messageSuffix, message);
});
}
/**
 * Fetches all token search-index rows, optionally restricted to the given
 * search parameter names. Called with no arguments, every row is returned.
 *
 * @param theParamNames zero or more parameter names to filter on; empty means "all"
 * @return the matching token index entities (never {@code null})
 */
@Nonnull
protected List<ResourceIndexedSearchParamToken> getAllTokenIndexes(String... theParamNames) {
	return runInTransaction(()->myResourceIndexedSearchParamTokenDao
		.findAll()
		.stream()
		.filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName()))
		.toList());
}
protected void logAllCoordsIndexes() {
runInTransaction(() -> {
ourLog.info("Coords indexes:\n * {}", myResourceIndexedSearchParamCoordsDao.findAll().stream().map(ResourceIndexedSearchParamCoords::toString).collect(Collectors.joining("\n * ")));
@ -609,7 +664,7 @@ public abstract class BaseJpaTest extends BaseTest {
});
}
protected void logAllUriIndexes() {
public void logAllUriIndexes() {
runInTransaction(() -> {
ourLog.info("URI indexes:\n * {}", myResourceIndexedSearchParamUriDao.findAll().stream().map(ResourceIndexedSearchParamUri::toString).collect(Collectors.joining("\n * ")));
});
@ -618,19 +673,33 @@ public abstract class BaseJpaTest extends BaseTest {
protected void logAllStringIndexes(String... theParamNames) {
String messageSuffix = theParamNames.length > 0 ? " containing " + Arrays.asList(theParamNames) : "";
runInTransaction(() -> {
String message = myResourceIndexedSearchParamStringDao
.findAll()
String message = getAllStringIndexes(theParamNames)
.stream()
.filter(t -> theParamNames.length == 0 ? true : Arrays.asList(theParamNames).contains(t.getParamName()))
.map(t -> t.toString())
.map(ResourceIndexedSearchParamString::toString)
.collect(Collectors.joining("\n * "));
ourLog.info("String indexes{}:\n * {}", messageSuffix, message);
});
}
/**
 * Fetches all string search-index rows, optionally restricted to the given
 * search parameter names. Called with no arguments, every row is returned.
 *
 * @param theParamNames zero or more parameter names to filter on; empty means "all"
 * @return the matching string index entities (never {@code null})
 */
@Nonnull
protected List<ResourceIndexedSearchParamString> getAllStringIndexes(String... theParamNames) {
	return runInTransaction(()->myResourceIndexedSearchParamStringDao
		.findAll()
		.stream()
		.filter(t -> theParamNames.length == 0 || Arrays.asList(theParamNames).contains(t.getParamName()))
		.toList());
}
protected void logAllResourceTags() {
runInTransaction(() -> {
ourLog.info("Token tags:\n * {}", myResourceTagDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
ourLog.info("Resource tags:\n * {}", myResourceTagDao.findAll().stream().map(ResourceTag::toString).collect(Collectors.joining("\n * ")));
});
}
/**
 * Logs every resource history tag row ({@link ResourceHistoryTag}), one per
 * line. Runs inside its own transaction.
 */
protected void logAllResourceHistoryTags() {
	runInTransaction(() -> {
		ourLog.info("Resource history tags:\n * {}", myResourceHistoryTagDao.findAll().stream().map(ResourceHistoryTag::toString).collect(Collectors.joining("\n * ")));
	});
}

View File

@ -41,28 +41,42 @@ import static org.assertj.core.api.Assertions.assertThat;
public class PreventDanglingInterceptorsExtension implements BeforeEachCallback, AfterEachCallback {
private static final Logger ourLog = LoggerFactory.getLogger(PreventDanglingInterceptorsExtension.class);
private final Supplier<IInterceptorService> myInterceptorServiceSuplier;
private final Supplier<IInterceptorService> myIInterceptorServiceSupplier;
private List<Object> myBeforeInterceptors;
public PreventDanglingInterceptorsExtension(Supplier<IInterceptorService> theInterceptorServiceSuplier) {
myInterceptorServiceSuplier = theInterceptorServiceSuplier;
public PreventDanglingInterceptorsExtension(Supplier<IInterceptorService> theIInterceptorServiceSupplier) {
myIInterceptorServiceSupplier = theIInterceptorServiceSupplier;
}
@Override
public void beforeEach(ExtensionContext theExtensionContext) throws Exception {
myBeforeInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors();
myBeforeInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors();
ourLog.info("Registered interceptors:\n * " + myBeforeInterceptors.stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
ourLog.info("Registered interceptors:\n * {}", myBeforeInterceptors.stream().map(Object::toString).collect(Collectors.joining("\n * ")));
}
@Override
public void afterEach(ExtensionContext theExtensionContext) throws Exception {
List<Object> afterInterceptors = myInterceptorServiceSuplier.get().getAllRegisteredInterceptors();
List<Object> afterInterceptors = myIInterceptorServiceSupplier.get().getAllRegisteredInterceptors();
// Handle interceptors added by the test
{
Map<Object, Object> delta = new IdentityHashMap<>();
afterInterceptors.forEach(t -> delta.put(t, t));
myBeforeInterceptors.forEach(t -> delta.remove(t));
delta.keySet().forEach(t->myInterceptorServiceSuplier.get().unregisterInterceptor(t));
assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(t -> t.toString()).collect(Collectors.joining("\n * "))).isTrue();
myBeforeInterceptors.forEach(delta::remove);
delta.keySet().forEach(t -> myIInterceptorServiceSupplier.get().unregisterInterceptor(t));
assertThat(delta.isEmpty()).as(() -> "Test added interceptor(s) and did not clean them up:\n * " + delta.keySet().stream().map(Object::toString).collect(Collectors.joining("\n * "))).isTrue();
}
// Handle interceptors removed by the test
{
IdentityHashMap<Object, Object> delta = new IdentityHashMap<>();
myBeforeInterceptors.forEach(t -> delta.put(t, t));
afterInterceptors.forEach(t -> delta.remove(t, t));
for (Object t : delta.keySet()) {
ourLog.warn("Interceptor {} was removed by test, re-adding", t);
myIInterceptorServiceSupplier.get().registerInterceptor(t);
}
}
}
}

View File

@ -19,9 +19,12 @@
*/
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import java.util.UUID;
public final class DatabaseSupportUtil {
private DatabaseSupportUtil() {}
@ -50,4 +53,12 @@ public final class DatabaseSupportUtil {
&& StringUtils.isNotBlank(System.getenv("DOCKER_HOST"))
&& System.getenv("DOCKER_HOST").contains("colima");
}
/**
 * Create a new connection to a randomized H2 database for testing.
 * <p>
 * A fresh {@link UUID} is embedded in the JDBC URL on every call, so each
 * caller gets its own isolated in-memory schema.
 * {@code CASE_INSENSITIVE_IDENTIFIERS=TRUE} is set on the URL — presumably to
 * mirror identifier handling of other supported databases (TODO confirm).
 */
public static DriverTypeEnum.ConnectionProperties newConnection() {
	String url = "jdbc:h2:mem:test_migration-" + UUID.randomUUID() + ";CASE_INSENSITIVE_IDENTIFIERS=TRUE;";
	return DriverTypeEnum.H2_EMBEDDED.newConnectionProperties(url, "SA", "SA");
}
}

View File

@ -0,0 +1,87 @@
/*-
* #%L
* HAPI FHIR JPA Server Test Utilities
* %%
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.partition.BaseRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import java.util.HashSet;
import java.util.Set;
/**
 * Test interceptor answering the {@code STORAGE_PARTITION_IDENTIFY_CREATE} and
 * {@code STORAGE_PARTITION_IDENTIFY_READ} pointcuts with a preconfigured
 * partition. Resource types registered via
 * {@link #addNonPartitionableResource(String)}, or reported as
 * non-partitionable by the helper service, are routed to the default
 * partition instead.
 */
public class TestPartitionSelectorInterceptor {

	// Partition returned for all partitionable resources. Must be set via
	// setNextPartition()/setNextPartitionId() before a request arrives (asserted below).
	private RequestPartitionId myNextPartition;
	// Resource type names that are always forced onto the default partition
	private final Set<String> myNonPartitionableResources = new HashSet<>();
	private BaseRequestPartitionHelperSvc myHelperSvc = new RequestPartitionHelperSvc();

	/**
	 * Constructor
	 */
	public TestPartitionSelectorInterceptor() {
		super();
	}

	/**
	 * Registers a resource type that should always be stored in the default partition.
	 *
	 * @param theResourceName the resource type name; must not be blank
	 * @return this instance, for call chaining
	 */
	public TestPartitionSelectorInterceptor addNonPartitionableResource(@Nonnull String theResourceName) {
		Validate.notBlank(theResourceName, "Must not be blank");
		myNonPartitionableResources.add(theResourceName);
		return this;
	}

	/**
	 * Sets the partition (by numeric ID) to return for subsequent create/read requests.
	 */
	public void setNextPartitionId(Integer theNextPartitionId) {
		myNextPartition = RequestPartitionId.fromPartitionId(theNextPartitionId);
	}

	/**
	 * Sets the partition to return for subsequent create/read requests.
	 */
	public void setNextPartition(RequestPartitionId theNextPartition) {
		myNextPartition = theNextPartition;
	}

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
	public RequestPartitionId selectPartitionCreate(IBaseResource theResource) {
		// NOTE(review): the resource type name is resolved with a cached R5 context
		// regardless of the test's actual FHIR version — resource names are generally
		// version-independent, but confirm if exercising version-specific types
		String resourceType = FhirContext.forR5Cached().getResourceType(theResource);
		return selectPartition(resourceType);
	}

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theDetails) {
		return selectPartition(theDetails.getResourceType());
	}

	// Shared routing logic: default partition for non-partitionable types,
	// otherwise the configured "next" partition (which must have been set).
	// A null resource type (e.g. type-less operations) also falls through to
	// the configured partition.
	@Nonnull
	private RequestPartitionId selectPartition(String theResourceType) {
		if (theResourceType != null) {
			if (!myHelperSvc.isResourcePartitionable(theResourceType)) {
				return RequestPartitionId.defaultPartition();
			}
			if (myNonPartitionableResources.contains(theResourceType)) {
				return RequestPartitionId.defaultPartition();
			}
		}

		assert myNextPartition != null;
		return myNextPartition;
	}
}

View File

@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.r4.hapi.ctx.FhirR4;
import org.hl7.fhir.r4.model.Coding;
@ -28,14 +28,15 @@ public class JpaStorageResourceParserTest {
private FhirContext myFhirContext;
@Mock
private ResourceSearchView patientSearchView;
ResourceHistoryTable myEntity;
@InjectMocks
private final JpaStorageResourceParser jpaStorageResourceParser = new JpaStorageResourceParser();
@Test
public void testPopulateResourceMeta_doesNotRemoveTags_whenTagListIsEmpty() {
Mockito.when(myFhirContext.getVersion()).thenReturn(new FhirR4());
Mockito.when(patientSearchView.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1"));
Mockito.when(myEntity.getIdDt()).thenReturn(new IdDt("Patient/test-patient/_history/1"));
Coding coding = new Coding("system", "code", "display");
List<BaseTag> tagList = Collections.emptyList();
@ -45,7 +46,7 @@ public class JpaStorageResourceParserTest {
resourceTarget.getMeta().addTag(coding);
Patient actualResult = jpaStorageResourceParser
.populateResourceMetadata(patientSearchView, forHistoryOperation, tagList, version, resourceTarget);
.populateResourceMetadata(myEntity, forHistoryOperation, tagList, version, resourceTarget);
List<Coding> actualTagList = actualResult.getMeta().getTag();
assertFalse(actualTagList.isEmpty());

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
@ -70,20 +71,20 @@ public class ResourceLinkPredicateBuilderTest {
@Test
public void createEverythingPredicate_withListOfPids_returnsInPredicate() {
when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1", PLACEHOLDER_BASE + "2"));
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L, 2L);
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L), JpaPid.fromId(2L));
assertEquals(InCondition.class, condition.getClass());
}
@Test
public void createEverythingPredicate_withSinglePid_returnsInCondition() {
when(myResourceLinkPredicateBuilder.generatePlaceholders(anyCollection())).thenReturn(List.of(PLACEHOLDER_BASE + "1"));
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), 1L);
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), JpaPid.fromId(1L));
assertEquals(BinaryCondition.class, condition.getClass());
}
@Test
public void createEverythingPredicate_withNoPids_returnsBinaryCondition() {
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new Long[0]);
Condition condition = myResourceLinkPredicateBuilder.createEverythingPredicate("Patient", new ArrayList<>(), new JpaPid[0]);
assertEquals(BinaryCondition.class, condition.getClass());
}

View File

@ -4,6 +4,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirMariaDBDialect;
import ca.uhn.fhir.jpa.model.dialect.HapiFhirOracleDialect;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
@ -55,7 +56,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new SQLServerDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -81,7 +82,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new SQLServerDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -109,7 +110,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -135,7 +136,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -162,7 +163,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new PostgreSQLDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -188,7 +189,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new PostgreSQLDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -215,7 +216,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -241,7 +242,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new HapiFhirOracleDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -268,7 +269,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new MySQL8Dialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -294,7 +295,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new MySQL8Dialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -325,7 +326,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -351,7 +352,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new HapiFhirMariaDBDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;
@ -382,7 +383,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new DerbyDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
GeneratedSql generated;
// No range
@ -408,7 +409,7 @@ public class SearchQueryBuilderTest {
HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider();
dialectProvider.setDialectForUnitTest(new DerbyDialect());
SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myStorageSettings, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false);
builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L));
builder.addResourceIdsPredicate(Lists.newArrayList(JpaPid.fromId(500L), JpaPid.fromId(501L)));
builder.addSortDate(builder.getOrCreateResourceTablePredicateBuilder().getColumnLastUpdated(), true);
GeneratedSql generated;

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.util;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
@ -237,5 +238,11 @@ class MemoryCacheServiceTest {
}
}
@Test
public void testToString() {
String actual = new MemoryCacheService.ForcedIdCacheKey("Patient", "12", RequestPartitionId.allPartitions()).toString();
assertEquals("MemoryCacheService.ForcedIdCacheKey[resType=Patient,resId=12,partId=RequestPartitionId[allPartitions=true]]", actual);
}
}

View File

@ -368,6 +368,10 @@ public class JpaStorageSettings extends StorageSettings {
* @since 7.2.0
*/
private boolean myWriteToLegacyLobColumns = false;
/**
* @since 8.0.0
*/
private boolean myAccessMetaSourceInformationFromProvenanceTable = false;
/**
* If this is enabled (default is {@literal false}), searches on token indexes will
@ -1764,6 +1768,37 @@ public class JpaStorageSettings extends StorageSettings {
myStoreMetaSourceInformation = theStoreMetaSourceInformation;
}
/**
* If set to <code>true</code> (default is false), the system will read
* <code>Resource.meta.source</code> values from the <code>HFJ_RES_VER_PROV</code>
* table. This table was replaced by dedicated columns in the <code>HFJ_RES_VER</code>
* table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version
* there is no need to read from the dedicated table. However, if old data still
* remains and has not been migrated (using a $reindex operation) then you can
* enable this setting in order to read from the old table.
*
* @since 8.0.0
*/
public boolean isAccessMetaSourceInformationFromProvenanceTable() {
return myAccessMetaSourceInformationFromProvenanceTable;
}
/**
* If set to <code>true</code> (default is false), the system will read
* <code>Resource.meta.source</code> values from the <code>HFJ_RES_VER_PROV</code>
* table. This table was replaced by dedicated columns in the <code>HFJ_RES_VER</code>
* table as of HAPI FHIR 6.8.0 (Smile CDR 2023.08.R01) and as of that version
* there is no need to read from the dedicated table. However, if old data still
* remains and has not been migrated (using a $reindex operation) then you can
* enable this setting in order to read from the old table.
*
* @since 8.0.0
*/
public void setAccessMetaSourceInformationFromProvenanceTable(
boolean theAccessMetaSourceInformationFromProvenanceTable) {
myAccessMetaSourceInformationFromProvenanceTable = theAccessMetaSourceInformationFromProvenanceTable;
}
/**
* <p>
* If set to {@code true}, ValueSets and expansions are stored in terminology tables. This is to facilitate

View File

@ -20,6 +20,7 @@
package ca.uhn.fhir.jpa.dao;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
/**
@ -30,10 +31,10 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
* Currently only DB->FHIR is enabled through this interface but the aim
* eventually is to handle both directions
*/
public interface IStorageResourceParser {
public interface IStorageResourceParser<T extends IResourcePersistentId<?>> {
// TODO: JA2 - Remove theForHistoryOperation flag - It toggles adding a bit of extra
// metadata but there's no reason to not always just add that, and this would
// simplify this interface
IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation);
IBaseResource toResource(IBasePersistedResource<T> theEntity, boolean theForHistoryOperation);
}

View File

@ -30,11 +30,11 @@ import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.trim;
@ -87,10 +87,16 @@ public abstract class BaseCaptureQueriesListener
&& next.getParametersList().get(0).size() > 0) {
size = next.getParametersList().size();
List<ParameterSetOperation> values = next.getParametersList().get(0);
params = values.stream()
.map(t -> t.getArgs()[1])
.map(t -> t != null ? t.toString() : "NULL")
.collect(Collectors.toList());
params = new ArrayList<>();
for (ParameterSetOperation t : values) {
if (t.getMethod().getName().equals("setNull")) {
params.add(null);
} else {
Object arg = t.getArgs()[1];
String s = arg != null ? arg.toString() : null;
params.add(s);
}
}
} else {
params = Collections.emptyList();
size = next.getParametersList().size();

View File

@ -29,6 +29,8 @@ import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
@ -324,6 +326,15 @@ public class MemoryCacheService {
private final RequestPartitionId myRequestPartitionId;
private final int myHashCode;
// Short-prefix-style debug representation exposing resource type, resource ID,
// and partition; the exact format is asserted by unit tests, so keep it stable.
@Override
public String toString() {
	return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
		.append("resType", myResourceType)
		.append("resId", myResourceId)
		.append("partId", myRequestPartitionId)
		.toString();
}
public ForcedIdCacheKey(
@Nullable String theResourceType,
@Nonnull String theResourceId,

View File

@ -138,10 +138,15 @@ public class SqlQuery {
break;
}
String nextParamValue = nextParams.remove(0);
String nextSubstitution;
if (nextParamValue != null) {
if (theSanitizeParams) {
nextParamValue = UrlUtil.sanitizeUrlPart(nextParamValue);
}
String nextSubstitution = "'" + nextParamValue + "'";
nextSubstitution = "'" + nextParamValue + "'";
} else {
nextSubstitution = "NULL";
}
retVal = retVal.substring(0, idx) + nextSubstitution + retVal.substring(idx + 1);
idx += nextSubstitution.length();
}

View File

@ -21,6 +21,7 @@ package ca.uhn.fhir.test.utilities;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.util.FhirTerser;
@ -87,6 +88,13 @@ public interface ITestDataBuilder {
return t -> __setPrimitiveChild(getFhirContext(), t, "language", "string", theLanguage);
}
/**
* List.entry.item
*/
default ICreationArgument withListItem(IIdType theReference) {
return withElementAt("entry", withReference("item", theReference));
}
/**
* Set Patient.gender
*/
@ -239,6 +247,10 @@ public interface ITestDataBuilder {
return buildResource("Patient", theModifiers);
}
default IIdType createList(ICreationArgument... theModifiers) {
return createResource("List", theModifiers);
}
default IIdType createPatient(ICreationArgument... theModifiers) {
return createResource("Patient", theModifiers);
}
@ -321,7 +333,7 @@ public interface ITestDataBuilder {
IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
reference.setReference(theReferenceValue.getValue());
RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition((IBaseResource) t);
BaseRuntimeElementDefinition<?> resourceDef = getFhirContext().getElementDefinition(t.getClass());
resourceDef.getChildByName(theReferenceName).getMutator().addValue(t, reference);
}
};

View File

@ -376,6 +376,11 @@
<artifactId>caffeine</artifactId>
<version>${caffeine_version}</version>
</dependency>
<dependency>
<groupId>com.github.jsqlparser</groupId>
<artifactId>jsqlparser</artifactId>
<version>5.0</version>
</dependency>
<dependency>
<groupId>com.googlecode.owasp-java-html-sanitizer</groupId>
<artifactId>owasp-java-html-sanitizer</artifactId>
@ -567,7 +572,7 @@
<dependency>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
<version>23.0.0</version>
<version>24.0.1</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>