Test fixes

James Agnew 2024-10-24 14:00:14 -04:00
parent ac4f0ffde0
commit 4fe24cab65
13 changed files with 58 additions and 71 deletions

View File

@@ -57,7 +57,6 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
 import ca.uhn.fhir.jpa.model.entity.BaseTag;
 import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
-import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceLink;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -1539,29 +1538,12 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 boolean haveSource = isNotBlank(source) && shouldStoreSource;
 boolean haveRequestId = isNotBlank(requestId) && shouldStoreRequestId;
 if (haveSource || haveRequestId) {
-ResourceHistoryProvenanceEntity provenance = null;
-if (reusingHistoryEntity) {
-/*
-* If version history is disabled, then we may be reusing
-* a previous history entity. If that's the case, let's try
-* to reuse the previous provenance entity too.
-*/
-provenance = historyEntry.getProvenance();
-}
-if (provenance == null) {
-provenance = historyEntry.toProvenance();
-}
-provenance.setResourceHistoryTable(historyEntry);
-provenance.setResourceTable(theEntity);
-provenance.setPartitionId(theEntity.getPartitionId());
 if (haveRequestId) {
 String persistedRequestId = left(requestId, Constants.REQUEST_ID_LENGTH);
-provenance.setRequestId(persistedRequestId);
 historyEntry.setRequestId(persistedRequestId);
 }
 if (haveSource) {
 String persistedSource = left(source, ResourceHistoryTable.SOURCE_URI_LENGTH);
-provenance.setSourceUri(persistedSource);
 historyEntry.setSourceUri(persistedSource);
 }
 if (theResource != null) {
@@ -1571,8 +1553,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 shouldStoreRequestId ? requestId : null,
 theResource);
 }
-myEntityManager.persist(provenance);
 }
 }

View File

@@ -43,6 +43,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
 import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
 import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@@ -51,6 +52,7 @@ import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
 import ca.uhn.fhir.jpa.model.entity.BaseTag;
 import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
 import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.TagDefinition;
@@ -204,6 +206,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 @Autowired
 private IJobCoordinator myJobCoordinator;
+@Autowired
+private IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
 private IInstanceValidatorModule myInstanceValidator;
 private String myResourceName;
 private Class<T> myResourceType;
@@ -1694,7 +1699,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 int pageSize = 100;
 for (int page = 0; ((long) page * pageSize) < entity.getVersion(); page++) {
 Slice<ResourceHistoryTable> historyEntities =
-myResourceHistoryTableDao.findForResourceIdAndReturnEntitiesAndFetchProvenance(
+myResourceHistoryTableDao.findAllVersionsExceptSpecificForResourcePid(
 PageRequest.of(page, pageSize), entity.getId(), historyEntity.getVersion());
 for (ResourceHistoryTable next : historyEntities) {
 reindexOptimizeStorageHistoryEntity(entity, next);
@@ -1716,11 +1721,18 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 }
 }
 }
-if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
-if (historyEntity.getProvenance() != null) {
-historyEntity.setSourceUri(historyEntity.getProvenance().getSourceUri());
-historyEntity.setRequestId(historyEntity.getProvenance().getRequestId());
-changed = true;
+if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+if (isBlank(historyEntity.getSourceUri()) && isBlank(historyEntity.getRequestId())) {
+Long id = historyEntity.getId();
+Optional<ResourceHistoryProvenanceEntity> provenanceEntityOpt =
+myResourceHistoryProvenanceDao.findById(id);
+if (provenanceEntityOpt.isPresent()) {
+ResourceHistoryProvenanceEntity provenanceEntity = provenanceEntityOpt.get();
+historyEntity.setSourceUri(provenanceEntity.getSourceUri());
+historyEntity.setRequestId(provenanceEntity.getRequestId());
+myResourceHistoryProvenanceDao.delete(provenanceEntity);
+changed = true;
+}
 }
 }
 if (changed) {
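An aside, not part of the commit: the reindex logic in the hunk above backfills the inline SOURCE_URI/REQUEST_ID columns from the legacy HFJ_RES_VER_PROV row and then deletes that row. Below is a minimal standalone sketch of that backfill-then-delete pattern; HistoryRow, Provenance, and LegacyProvenanceStore are hypothetical stand-ins for illustration, not the HAPI entities or repositories.

import java.util.Optional;

// Hypothetical sketch of the backfill-then-delete migration step shown above.
class ProvenanceBackfillSketch {
	record Provenance(String sourceUri, String requestId) {}

	interface LegacyProvenanceStore {
		Optional<Provenance> findById(long historyRowId); // legacy row shares the history row's id
		void delete(Provenance provenance);
	}

	static class HistoryRow {
		long id;
		String sourceUri;
		String requestId;
	}

	// Returns true when the history row was changed and should be re-persisted by the caller.
	static boolean backfill(HistoryRow history, LegacyProvenanceStore store) {
		if (history.sourceUri != null || history.requestId != null) {
			return false; // inline columns already populated, nothing to migrate
		}
		Optional<Provenance> legacy = store.findById(history.id);
		if (legacy.isEmpty()) {
			return false;
		}
		history.sourceUri = legacy.get().sourceUri();
		history.requestId = legacy.get().requestId();
		store.delete(legacy.get()); // the legacy row is redundant once its values are copied
		return true;
	}
}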

View File

@@ -206,7 +206,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 * However, for realistic average workloads, this should reduce the number of round trips.
 */
 if (idChunk.size() >= 2) {
-List<ResourceTable> entityChunk = prefetchResourceTableHistoryAndProvenance(idChunk);
+List<ResourceTable> entityChunk = prefetchResourceTableAndHistory(idChunk);
 if (thePreFetchIndexes) {
@@ -244,14 +244,13 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 }
 @Nonnull
-private List<ResourceTable> prefetchResourceTableHistoryAndProvenance(List<Long> idChunk) {
+private List<ResourceTable> prefetchResourceTableAndHistory(List<Long> idChunk) {
 assert idChunk.size() < SearchConstants.MAX_PAGE_SIZE : "assume pre-chunked";
 Query query = myEntityManager.createQuery("select r, h "
 + " FROM ResourceTable r "
 + " LEFT JOIN fetch ResourceHistoryTable h "
 + " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
-+ " left join fetch h.myProvenance "
 + " WHERE r.myId IN ( :IDS ) ");
 query.setParameter("IDS", idChunk);
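A hedged illustration of the round-trip-saving prefetch described in the comment above: a hypothetical caller chunks a large id list (in the spirit of the "assume pre-chunked" assertion) and runs the same JPQL once per chunk against a plain jakarta.persistence EntityManager. The PrefetchChunkerSketch class and the 800-id chunk size are assumptions for illustration, not HAPI code.

import jakarta.persistence.EntityManager;
import jakarta.persistence.Query;
import java.util.ArrayList;
import java.util.List;

// Hedged sketch: batch ids into chunks and issue one IN-clause query per chunk,
// fetching each resource together with its current history row in a single round trip.
class PrefetchChunkerSketch {
	@SuppressWarnings("unchecked")
	static List<Object[]> prefetch(EntityManager em, List<Long> allIds) {
		int chunkSize = 800; // assumption: stays under typical IN-list limits
		List<Object[]> rows = new ArrayList<>();
		for (int from = 0; from < allIds.size(); from += chunkSize) {
			List<Long> chunk = allIds.subList(from, Math.min(from + chunkSize, allIds.size()));
			// Same JPQL as the hunk above, minus the removed provenance fetch.
			Query query = em.createQuery("select r, h "
					+ " FROM ResourceTable r "
					+ " LEFT JOIN fetch ResourceHistoryTable h "
					+ " on r.myVersion = h.myResourceVersion and r.id = h.myResourceId "
					+ " WHERE r.myId IN ( :IDS ) ");
			query.setParameter("IDS", chunk);
			rows.addAll(query.getResultList());
		}
		return rows;
	}
}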

View File

@@ -40,7 +40,6 @@ import jakarta.persistence.TypedQuery;
 import jakarta.persistence.criteria.CriteriaBuilder;
 import jakarta.persistence.criteria.CriteriaQuery;
 import jakarta.persistence.criteria.Expression;
-import jakarta.persistence.criteria.JoinType;
 import jakarta.persistence.criteria.Predicate;
 import jakarta.persistence.criteria.Root;
 import jakarta.persistence.criteria.Subquery;
@@ -122,8 +121,6 @@ public class HistoryBuilder {
 addPredicatesToQuery(cb, thePartitionId, criteriaQuery, from, theHistorySearchStyle);
-from.fetch("myProvenance", JoinType.LEFT);
 /*
 * The sort on myUpdated is the important one for _history operations, but there are
 * cases where multiple pages of results all have the exact same myUpdated value (e.g.

View File

@@ -47,8 +47,8 @@ public interface IResourceHistoryTableDao extends JpaRepository<ResourceHistoryT
 Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
 @Query(
-"SELECT t FROM ResourceHistoryTable t LEFT OUTER JOIN FETCH t.myProvenance WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
-Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(
+"SELECT t FROM ResourceHistoryTable t WHERE t.myResourceId = :resId AND t.myResourceVersion <> :dontWantVersion")
+Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(
 Pageable thePage, @Param("resId") Long theId, @Param("dontWantVersion") Long theDontWantVersion);
 @Query("" + "SELECT v.myId FROM ResourceHistoryTable v "

View File

@@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
 import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryProvenanceEntity;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.util.MemoryCacheService;
@@ -71,6 +72,7 @@ import org.springframework.transaction.support.TransactionSynchronizationManager
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import java.util.concurrent.atomic.AtomicInteger;
 @Service
@@ -238,9 +240,10 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
 callHooks(theRequestDetails, theRemainingCount, version, id);
-if (version.getProvenance() != null) {
-myResourceHistoryProvenanceTableDao.deleteByPid(
-version.getProvenance().getId());
+if (myStorageSettings.isAccessMetaSourceInformationFromProvenanceTable()) {
+Optional<ResourceHistoryProvenanceEntity> provenanceOpt =
+myResourceHistoryProvenanceTableDao.findById(theNextVersionId);
+provenanceOpt.ifPresent(entity -> myResourceHistoryProvenanceTableDao.deleteByPid(entity.getId()));
 }
 myResourceHistoryTagDao.deleteByPid(version.getId());

View File

@@ -1184,7 +1184,8 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
 * this only applies to includes, which don't tend to be massive in numbers.
 */
 if (resourcePidToVersion != null) {
-Long version = resourcePidToVersion.get(JpaPid.fromId(next.getResourceId()));
+JpaPid key = JpaPid.fromId(next.getResourceId());
+Long version = resourcePidToVersion.get(key);
 resourceId.setVersion(version);
 if (version != null && !version.equals(next.getVersion())) {
 next = myResourceHistoryTableDao.findForIdAndVersion(next.getResourceId(), version);

View File

@@ -21,23 +21,37 @@ package ca.uhn.fhir.jpa.model.dao;
 import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
 import ca.uhn.fhir.rest.api.server.storage.BaseResourcePersistentId;
+import jakarta.annotation.Nonnull;
+import org.apache.commons.collections4.ComparatorUtils;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 /**
 * JPA implementation of IResourcePersistentId. JPA uses a Long as the primary key. This class should be used in any
 * context where the pid is known to be a Long.
 */
-public class JpaPid extends BaseResourcePersistentId<Long> {
+public class JpaPid extends BaseResourcePersistentId<Long> implements Comparable<JpaPid> {
 private final Long myId;
 private PartitionablePartitionId myPartitionablePartitionId;
+private static final Comparator<JpaPid> COMPARATOR;
+static {
+Comparator<JpaPid> partitionComparator =
+Comparator.comparing(t -> defaultIfNull(t.getPartitionId(), Integer.MIN_VALUE));
+Comparator<JpaPid> idComparator = Comparator.comparing(t -> t.myId);
+COMPARATOR = ComparatorUtils.chainedComparator(List.of(partitionComparator, idComparator));
+}
 private JpaPid(Long theId) {
 super(null);
 myId = theId;
@@ -128,14 +142,13 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
 public boolean equals(Object theO) {
 if (this == theO) return true;
 if (theO == null || getClass() != theO.getClass()) return false;
-if (!super.equals(theO)) return false;
 JpaPid jpaPid = (JpaPid) theO;
 return myId.equals(jpaPid.myId);
 }
 @Override
 public int hashCode() {
-return Objects.hash(super.hashCode(), myId);
+return Objects.hash(myId);
 }
 @Override
@@ -148,6 +161,11 @@ public class JpaPid extends BaseResourcePersistentId<Long> {
 return myId.toString();
 }
+@Override
+public int compareTo(@Nonnull JpaPid theOther) {
+return COMPARATOR.compare(this, theOther);
+}
 public Integer getPartitionId() {
 if (getPartitionablePartitionId() == null) {
 return null;
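A small standalone illustration, not part of the commit, of the ordering the new JpaPid comparator establishes: partition id first (a null partition is treated as Integer.MIN_VALUE, so it sorts ahead of any real partition), then the numeric id as a tie-breaker. PidKey below is a hypothetical stand-in for JpaPid built only on java.util.Comparator.

import java.util.Comparator;
import java.util.List;

// Hypothetical stand-in for JpaPid, showing the same null-safe two-level ordering.
class PidKey implements Comparable<PidKey> {
	private static final Comparator<PidKey> COMPARATOR = Comparator
			.<PidKey, Integer>comparing(t -> t.partitionId == null ? Integer.MIN_VALUE : t.partitionId)
			.thenComparingLong(t -> t.id);

	final Integer partitionId; // null means "no partition"
	final Long id;

	PidKey(Integer partitionId, Long id) {
		this.partitionId = partitionId;
		this.id = id;
	}

	@Override
	public int compareTo(PidKey other) {
		return COMPARATOR.compare(this, other);
	}

	@Override
	public String toString() {
		return partitionId + "/" + id;
	}

	public static void main(String[] args) {
		List<PidKey> sorted = List.of(new PidKey(2, 5L), new PidKey(null, 9L), new PidKey(2, 1L))
				.stream().sorted().toList();
		System.out.println(sorted); // [null/9, 2/1, 2/5]: null partition first, then id within a partition
	}
}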

View File

@@ -37,7 +37,6 @@ import jakarta.persistence.JoinColumn;
 import jakarta.persistence.Lob;
 import jakarta.persistence.ManyToOne;
 import jakarta.persistence.OneToMany;
-import jakarta.persistence.OneToOne;
 import jakarta.persistence.Table;
 import jakarta.persistence.Transient;
 import jakarta.persistence.UniqueConstraint;
@@ -119,10 +118,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
 @OptimisticLock(excluded = true)
 private ResourceEncodingEnum myEncoding;
-@OneToOne(
-mappedBy = "myResourceHistoryTable",
-cascade = {CascadeType.REMOVE})
-private ResourceHistoryProvenanceEntity myProvenance;
 // TODO: This was added in 6.8.0 - In the future we should drop ResourceHistoryProvenanceEntity
 @Column(name = "SOURCE_URI", length = SOURCE_URI_LENGTH, nullable = true)
 private String mySourceUri;
@@ -180,10 +175,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
 myResourceTextVc = theResourceTextVc;
 }
-public ResourceHistoryProvenanceEntity getProvenance() {
-return myProvenance;
-}
 public void addTag(ResourceTag theTag) {
 ResourceHistoryTag tag = new ResourceHistoryTag(this, theTag.getTag(), getPartitionId());
 tag.setResourceType(theTag.getResourceType());

View File

@@ -194,12 +194,12 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
 // Query 1 - Find resources: Make sure we search for tag type+system+code always
 String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
 assertEquals("SELECT t0.RES_ID FROM HFJ_RESOURCE t0 INNER JOIN HFJ_RES_TAG t1 ON (t0.RES_ID = t1.RES_ID) INNER JOIN HFJ_TAG_DEF t2 ON (t1.TAG_ID = t2.TAG_ID) WHERE (((t0.RES_TYPE = ?) AND (t0.RES_DELETED_AT IS NULL)) AND ((t2.TAG_TYPE = ?) AND (t2.TAG_SYSTEM = ?) AND (t2.TAG_CODE = ?)))", sql);
-// Query 2 - Load resourece contents
+// Query 2 - Load resource contents
 sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
-assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
+assertThat(sql).contains("where rht1_0.RES_ID in (?)");
 // Query 3 - Load tags and defintions
 sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(false, false);
-assertThat(sql).contains("from HFJ_RES_TAG rt1_0 join HFJ_TAG_DEF");
+assertThat(sql).contains("from HFJ_HISTORY_TAG rht1_0 join HFJ_TAG_DEF");
 assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);
@@ -234,7 +234,7 @@ public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {
 assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_URI t0 WHERE (t0.HASH_URI = ?)", sql);
 // Query 2 - Load resourece contents
 sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(false, false);
-assertThat(sql).contains("where rsv1_0.RES_ID in (?)");
+assertThat(sql).contains("where rht1_0.RES_ID in (?)");
 assertThat(toUnqualifiedVersionlessIds(outcome)).containsExactly(id);

View File

@@ -709,6 +709,8 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
 observation.getSubject().setReference(patientId.withVersion("1").getValue());
 IIdType observationId = myObservationDao.create(observation).getId().toUnqualified();
+logAllResourceLinks();
 // Search - Non Synchronous for *
 {
 IBundleProvider outcome = myObservationDao.search(new SearchParameterMap().addInclude(IBaseResource.INCLUDE_ALL));

View File

@@ -439,11 +439,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 assertEquals(myPartitionId, historyTags.get(0).getPartitionId().getPartitionId().intValue());
 assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate());
-// HFJ_RES_VER_PROV
-assertNotNull(version.getProvenance());
-assertEquals(myPartitionId, version.getProvenance().getPartitionId().getPartitionId().intValue());
-assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate());
 // HFJ_SPIDX_STRING
 List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
 ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * ")));
@@ -528,11 +523,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 assertNull(historyTags.get(0).getPartitionId().getPartitionId());
 assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(0).getPartitionId().getPartitionDate());
-// HFJ_RES_VER_PROV
-assertNotNull(version.getProvenance());
-assertNull(version.getProvenance().getPartitionId().getPartitionId());
-assertLocalDateFromDbMatches(myPartitionDate, version.getProvenance().getPartitionId().getPartitionDate());
 // HFJ_SPIDX_STRING
 List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
 String stringsDesc = strings.stream().map(ResourceIndexedSearchParamString::toString).sorted().collect(Collectors.joining("\n * "));
@@ -791,12 +781,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
 assertEquals(myPartitionId, historyTags.get(1).getPartitionId().getPartitionId().intValue());
 assertLocalDateFromDbMatches(myPartitionDate, historyTags.get(1).getPartitionId().getPartitionDate());
-// HFJ_RES_VER_PROV
-assertNotNull(resVer.getProvenance());
-assertNotNull(resVer.getPartitionId());
-assertEquals(myPartitionId, resVer.getProvenance().getPartitionId().getPartitionId().intValue());
-assertLocalDateFromDbMatches(myPartitionDate, resVer.getProvenance().getPartitionId().getPartitionDate());
 // HFJ_SPIDX_STRING
 List<ResourceIndexedSearchParamString> strings = myResourceIndexedSearchParamStringDao.findAllForResourceId(patientId);
 ourLog.info("\n * {}", strings.stream().map(ResourceIndexedSearchParamString::toString).collect(Collectors.joining("\n * ")));

View File

@@ -370,7 +370,7 @@ public class GiantTransactionPerfTest {
 }
 @Override
-public Slice<ResourceHistoryTable> findForResourceIdAndReturnEntitiesAndFetchProvenance(Pageable thePage, Long theId, Long theDontWantVersion) {
+public Slice<ResourceHistoryTable> findAllVersionsExceptSpecificForResourcePid(Pageable thePage, Long theId, Long theDontWantVersion) {
 throw new UnsupportedOperationException();
 }