Stop writing to hfj_forced_id (#5817)
- Stop writing to hfj_forced_id
- Stop reading ForcedId
- Cleanup client-id assignment
- Fix query counts in tests
- Drop the unused indexes and fk
parent b608633dcb
commit e4ba56ed3a

@@ -0,0 +1,4 @@
---
type: change
issue: 5817
title: "The HFJ_FORCED_ID table is no longer used."
@@ -0,0 +1,4 @@

## HFJ_FORCED_ID Table

The HFJ_FORCED_ID table is no longer used.
Users may delete it after upgrading to 7.2 to free database storage space.
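A minimal sketch of that optional cleanup, assuming the 7.2 migrations (which detach the table's foreign key and indexes) have already run and a backup exists; this statement is not part of the changeset:

-- Optional storage cleanup after upgrading to 7.2; verify nothing in your deployment still reads the table.
DROP TABLE hfj_forced_id;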
@@ -84,6 +84,15 @@ The HFJ_RESOURCE table indicates a single resource of any type in the database.
            Contains the resource type (e.g. <code>Patient</code>)
        </td>
    </tr>
    <tr>
        <td>FHIR_ID</td>
        <td></td>
        <td>String</td>
        <td></td>
        <td>
            Contains the FHIR Resource id element: either the PID, or the client-assigned id.
        </td>
    </tr>
    <tr>
        <td>HASH_SHA256</td>
        <td></td>
@@ -243,30 +252,6 @@ The complete raw contents of the resource is stored in either the `RES_TEXT` or
    </tbody>
</table>

<a name="HFJ_FORCED_ID"/>

# HFJ_FORCED_ID: Client Assigned/Visible Resource IDs

By default, the **HFJ_RESOURCE.RES_ID** column is used as the resource ID for all server-assigned IDs. For example, if a Patient resource is created in a completely empty database, it will be assigned the ID `Patient/1` by the server and RES_ID will have a value of 1.

However, when client-assigned IDs are used, these may contain text values to allow a client to create an ID such
as `Patient/ABC`. When a client-assigned ID is given to a resource, a row is created in the **HFJ_RESOURCE** table. When
an **HFJ_FORCED_ID** row exists corresponding to the equivalent **HFJ_RESOURCE** row, the RES_ID value is no longer
visible or usable by FHIR clients and it becomes purely an internal ID to the JPA server.

If the server has been configured with
a [Resource Server ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceServerIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.IdStrategyEnum))
of [UUID](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.IdStrategyEnum.html#UUID), or the
server has been configured with
a [Resource Client ID Strategy](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.html#setResourceClientIdStrategy(ca.uhn.fhir.jpa.api.config.JpaStorageSettings.ClientIdStrategyEnum))
of [ANY](/apidocs/hapi-fhir-storage/undefined/ca/uhn/fhir/jpa/api/config/JpaStorageSettings.ClientIdStrategyEnum.html#ANY)
the server will create a Forced ID for all resources (not only resources having textual IDs).

The **HFJ_RESOURCE** table now has a FHIR_ID column.
This column is always populated, both for server-assigned ids and for client-assigned ids.
As of Hapi release 6.10, this column is used in place of the **HFJ_FORCED_ID** table for id search and sort.
A future version of Hapi will stop populating the **HFJ_FORCED_ID** table.

## Columns

<table class="table table-striped table-condensed">
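With the chapter above removed, the lookup it described is served entirely by the FHIR_ID column on **HFJ_RESOURCE** documented earlier in this file. A rough sketch of the resulting query shape (illustrative only; column and table names come from this changeset, not from the exact SQL Hibernate generates):

-- Resolve a client-assigned id such as Patient/ABC without joining HFJ_FORCED_ID.
SELECT res_id
  FROM hfj_resource
 WHERE res_type = 'Patient'
   AND fhir_id = 'ABC';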
@ -37,7 +37,6 @@ import ca.uhn.fhir.jpa.api.dao.IJpaDao;
|
|||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceLinkDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
||||
|
@ -195,9 +194,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
@Autowired
|
||||
protected IIdHelperService<JpaPid> myIdHelperService;
|
||||
|
||||
@Autowired
|
||||
protected IForcedIdDao myForcedIdDao;
|
||||
|
||||
@Autowired
|
||||
protected ISearchCoordinatorSvc<JpaPid> mySearchCoordinatorSvc;
|
||||
|
||||
|
@ -1204,10 +1200,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
|
|||
if (entity.getId() == null) {
|
||||
myEntityManager.persist(entity);
|
||||
|
||||
if (entity.getForcedId() != null) {
|
||||
myEntityManager.persist(entity.getForcedId());
|
||||
}
|
||||
|
||||
postPersist(entity, (T) theResource, theRequest);
|
||||
|
||||
} else if (entity.getDeleted() != null) {
|
||||
|
|
|
@ -44,14 +44,12 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
|||
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
|
||||
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.delete.DeleteConflictUtil;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseTag;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
|
@ -517,18 +515,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
}
|
||||
}
|
||||
|
||||
String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
|
||||
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
|
||||
boolean resourceIdWasServerAssigned =
|
||||
theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE;
|
||||
if (resourceHadIdBeforeStorage) {
|
||||
entity.setFhirId(resourceIdBeforeStorage);
|
||||
}
|
||||
boolean isClientAssignedId = storeNonPidResourceId(theResource, entity);
|
||||
|
||||
HookParams hookParams;
|
||||
|
||||
// Notify interceptor for accepting/rejecting client assigned ids
|
||||
if (!resourceIdWasServerAssigned && resourceHadIdBeforeStorage) {
|
||||
if (isClientAssignedId) {
|
||||
hookParams = new HookParams().add(IBaseResource.class, theResource).add(RequestDetails.class, theRequest);
|
||||
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_CLIENT_ASSIGNED_ID, hookParams);
|
||||
}
|
||||
|
@ -542,7 +534,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
.add(TransactionDetails.class, theTransactionDetails);
|
||||
doCallHooks(theTransactionDetails, theRequest, Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, hookParams);
|
||||
|
||||
if (resourceHadIdBeforeStorage && !resourceIdWasServerAssigned) {
|
||||
if (isClientAssignedId) {
|
||||
validateResourceIdCreation(theResource, theRequest);
|
||||
}
|
||||
|
||||
|
@ -569,31 +561,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
|
||||
// Store the resource forced ID if necessary
|
||||
JpaPid jpaPid = JpaPid.fromId(updatedEntity.getResourceId());
|
||||
if (resourceHadIdBeforeStorage) {
|
||||
if (resourceIdWasServerAssigned) {
|
||||
boolean createForPureNumericIds = true;
|
||||
createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
|
||||
} else {
|
||||
boolean createForPureNumericIds = getStorageSettings().getResourceClientIdStrategy()
|
||||
!= JpaStorageSettings.ClientIdStrategyEnum.ALPHANUMERIC;
|
||||
createForcedIdIfNeeded(entity, resourceIdBeforeStorage, createForPureNumericIds);
|
||||
}
|
||||
} else {
|
||||
switch (getStorageSettings().getResourceClientIdStrategy()) {
|
||||
case NOT_ALLOWED:
|
||||
case ALPHANUMERIC:
|
||||
break;
|
||||
case ANY:
|
||||
boolean createForPureNumericIds = true;
|
||||
createForcedIdIfNeeded(
|
||||
updatedEntity, theResource.getIdElement().getIdPart(), createForPureNumericIds);
|
||||
// for client ID mode ANY, we will always have a forced ID. If we ever
|
||||
// stop populating the transient forced ID be warned that we use it
|
||||
// (and expect it to be set correctly) farther below.
|
||||
assert updatedEntity.getTransientForcedId() != null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Populate the resource with its actual final stored ID from the entity
|
||||
theResource.setId(entity.getIdDt());
|
||||
|
@ -601,7 +568,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
// Pre-cache the resource ID
|
||||
jpaPid.setAssociatedResourceId(entity.getIdType(myFhirContext));
|
||||
myIdHelperService.addResolvedPidToForcedId(
|
||||
jpaPid, theRequestPartitionId, getResourceName(), entity.getTransientForcedId(), null);
|
||||
jpaPid, theRequestPartitionId, getResourceName(), entity.getFhirId(), null);
|
||||
theTransactionDetails.addResolvedResourceId(jpaPid.getAssociatedResourceId(), jpaPid);
|
||||
theTransactionDetails.addResolvedResource(jpaPid.getAssociatedResourceId(), theResource);
|
||||
|
||||
|
@ -646,40 +613,34 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
return outcome;
|
||||
}
|
||||
|
||||
private void createForcedIdIfNeeded(
|
||||
ResourceTable theEntity, String theResourceId, boolean theCreateForPureNumericIds) {
|
||||
// TODO MB delete this in step 3
|
||||
if (isNotBlank(theResourceId) && theEntity.getForcedId() == null) {
|
||||
if (theCreateForPureNumericIds || !IdHelperService.isValidPid(theResourceId)) {
|
||||
ForcedId forcedId = new ForcedId();
|
||||
forcedId.setResourceType(theEntity.getResourceType());
|
||||
forcedId.setForcedId(theResourceId);
|
||||
forcedId.setResource(theEntity);
|
||||
forcedId.setPartitionId(theEntity.getPartitionId());
|
||||
|
||||
/*
|
||||
* As of Hibernate 5.6.2, assigning the forced ID to the
|
||||
* resource table causes an extra update to happen, even
|
||||
* though the ResourceTable entity isn't actually changed
|
||||
* (there is a @OneToOne reference on ResourceTable to the
|
||||
* ForcedId table, but the actual column is on the ForcedId
|
||||
* table so it doesn't actually make sense to update the table
|
||||
* when this is set). But to work around that we avoid
|
||||
* actually assigning ResourceTable#myForcedId here.
|
||||
/**
|
||||
* Check for an id on the resource and if so,
|
||||
* store it in ResourceTable.
|
||||
*
|
||||
* It's conceivable they may fix this in the future, or
|
||||
* they may not.
|
||||
* The fhirId property is either set here with the resource id
|
||||
* OR by hibernate once the PK is generated for a server-assigned id.
|
||||
*
|
||||
* If you want to try assigning the forced it to the resource
|
||||
* entity (by calling ResourceTable#setForcedId) try running
|
||||
* the tests FhirResourceDaoR4QueryCountTest to verify that
|
||||
* nothing has broken as a result.
|
||||
* JA 20220121
|
||||
* Used for both client-assigned id and for server-assigned UUIDs.
|
||||
*
|
||||
* @return true if this is a client-assigned id
|
||||
*
|
||||
* @see ca.uhn.fhir.jpa.model.entity.ResourceTable.FhirIdGenerator
|
||||
*/
|
||||
theEntity.setTransientForcedId(forcedId.getForcedId());
|
||||
myForcedIdDao.save(forcedId);
|
||||
}
|
||||
private boolean storeNonPidResourceId(T theResource, ResourceTable entity) {
|
||||
String resourceIdBeforeStorage = theResource.getIdElement().getIdPart();
|
||||
boolean resourceHadIdBeforeStorage = isNotBlank(resourceIdBeforeStorage);
|
||||
boolean resourceIdWasServerAssigned =
|
||||
theResource.getUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED) == Boolean.TRUE;
|
||||
|
||||
// We distinguish actual client-assigned ids from UUIDs which the server assigned.
|
||||
boolean isClientAssigned = resourceHadIdBeforeStorage && !resourceIdWasServerAssigned;
|
||||
|
||||
// But both need to be set on the entity fhirId field.
|
||||
if (resourceHadIdBeforeStorage) {
|
||||
entity.setFhirId(resourceIdBeforeStorage);
|
||||
}
|
||||
|
||||
return isClientAssigned;
|
||||
}
|
||||
|
||||
void validateResourceIdCreation(T theResource, RequestDetails theRequest) {
|
||||
|
@ -1891,7 +1852,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
throw new ResourceNotFoundException(Msg.code(1998) + theId);
|
||||
}
|
||||
validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
|
||||
entity.setTransientForcedId(theId.getIdPart());
|
||||
return entity;
|
||||
}
|
||||
|
||||
|
@ -2634,9 +2594,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
}
|
||||
|
||||
private void validateGivenIdIsAppropriateToRetrieveResource(IIdType theId, BaseHasResource entity) {
|
||||
if (entity.getForcedId() != null) {
|
||||
if (getStorageSettings().getResourceClientIdStrategy() != JpaStorageSettings.ClientIdStrategyEnum.ANY) {
|
||||
if (theId.isIdPartValidLong()) {
|
||||
if (!entity.getIdDt().getIdPart().equals(theId.getIdPart())) {
|
||||
// This means that the resource with the given numeric ID exists, but it has a "forced ID", meaning
|
||||
// that
|
||||
// as far as the outside world is concerned, the given ID doesn't exist (it's just an internal
|
||||
|
@ -2646,8 +2604,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
throw new ResourceNotFoundException(Msg.code(2000) + theId);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void validateResourceType(BaseHasResource entity) {
|
||||
validateResourceType(entity, myResourceName);
|
||||
|
|
|
@ -202,7 +202,12 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
|
|||
*/
|
||||
if (ids.size() >= 2) {
|
||||
List<ResourceTable> loadedResourceTableEntries = new ArrayList<>();
|
||||
preFetchIndexes(ids, "forcedId", "myForcedId", loadedResourceTableEntries);
|
||||
|
||||
new QueryChunker<Long>()
|
||||
.chunk(
|
||||
ids,
|
||||
nextChunk ->
|
||||
loadedResourceTableEntries.addAll(myResourceTableDao.findAllById(nextChunk)));
|
||||
|
||||
List<Long> entityIds;
|
||||
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
/*
|
||||
* #%L
|
||||
* HAPI FHIR JPA Server
|
||||
* %%
|
||||
* Copyright (C) 2014 - 2024 Smile CDR, Inc.
|
||||
* %%
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
* #L%
|
||||
*/
|
||||
package ca.uhn.fhir.jpa.dao.data;
|
||||
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Modifying;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
/**
|
||||
* Legacy forced_id implementation.
|
||||
*
|
||||
* @deprecated we now have a fhir_id column directly on HFJ_RESOURCE.
|
||||
* No runtime code should query this table except for deletions by PK.
|
||||
* To be deleted in 2024 (zero-downtime).
|
||||
*/
|
||||
@Deprecated(since = "6.10")
|
||||
@Repository
|
||||
public interface IForcedIdDao extends JpaRepository<ForcedId, Long>, IHapiFhirJpaRepository {
|
||||
|
||||
@Modifying
|
||||
@Query("DELETE FROM ForcedId t WHERE t.myId = :pid")
|
||||
void deleteByPid(@Param("pid") Long theId);
|
||||
}
|
|
@ -43,7 +43,6 @@ public interface IResourceLinkDao extends JpaRepository<ResourceLink, Long>, IHa
|
|||
* Loads a collection of ResourceLink entities by PID, but also eagerly fetches
|
||||
* the target resources and their forced IDs
|
||||
*/
|
||||
@Query(
|
||||
"SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr LEFT JOIN FETCH tr.myForcedId WHERE t.myId in :pids")
|
||||
@Query("SELECT t FROM ResourceLink t LEFT JOIN FETCH t.myTargetResource tr WHERE t.myId in :pids")
|
||||
List<ResourceLink> findByPidAndFetchTargetDetails(@Param("pids") List<Long> thePids);
|
||||
}
|
||||
|
|
|
@ -177,25 +177,26 @@ public interface IResourceTableDao
|
|||
@Query("SELECT t.myId, t.myResourceType, t.myVersion FROM ResourceTable t WHERE t.myId IN ( :pid )")
|
||||
Collection<Object[]> getResourceVersionsForPid(@Param("pid") List<Long> pid);
|
||||
|
||||
@Query(
|
||||
"SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid")
|
||||
@Query("SELECT t FROM ResourceTable t WHERE t.myPartitionId.myPartitionId IS NULL AND t.myId = :pid")
|
||||
Optional<ResourceTable> readByPartitionIdNull(@Param("pid") Long theResourceId);
|
||||
|
||||
@Query(
|
||||
"SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId = :partitionId AND t.myId = :pid")
|
||||
@Query("SELECT t FROM ResourceTable t WHERE t.myPartitionId.myPartitionId = :partitionId AND t.myId = :pid")
|
||||
Optional<ResourceTable> readByPartitionId(
|
||||
@Param("partitionId") int thePartitionId, @Param("pid") Long theResourceId);
|
||||
|
||||
@Query(
|
||||
"SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE (t.myPartitionId.myPartitionId IS NULL OR t.myPartitionId.myPartitionId IN (:partitionIds)) AND t.myId = :pid")
|
||||
"SELECT t FROM ResourceTable t WHERE (t.myPartitionId.myPartitionId IS NULL OR t.myPartitionId.myPartitionId IN (:partitionIds)) AND t.myId = :pid")
|
||||
Optional<ResourceTable> readByPartitionIdsOrNull(
|
||||
@Param("partitionIds") Collection<Integer> thrValues, @Param("pid") Long theResourceId);
|
||||
|
||||
@Query(
|
||||
"SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myPartitionId.myPartitionId IN (:partitionIds) AND t.myId = :pid")
|
||||
@Query("SELECT t FROM ResourceTable t WHERE t.myPartitionId.myPartitionId IN (:partitionIds) AND t.myId = :pid")
|
||||
Optional<ResourceTable> readByPartitionIds(
|
||||
@Param("partitionIds") Collection<Integer> thrValues, @Param("pid") Long theResourceId);
|
||||
|
||||
@Query("SELECT t FROM ResourceTable t LEFT JOIN FETCH t.myForcedId WHERE t.myId IN :pids")
|
||||
@Query("SELECT t FROM ResourceTable t WHERE t.myId IN :pids")
|
||||
List<ResourceTable> findAllByIdAndLoadForcedIds(@Param("pids") List<Long> thePids);
|
||||
|
||||
@Query("SELECT t FROM ResourceTable t where t.myResourceType = :restype and t.myFhirId = :fhirId")
|
||||
Optional<ResourceTable> findByTypeAndFhirId(
|
||||
@Param("restype") String theResourceName, @Param("fhirId") String theFhirId);
|
||||
}
|
||||
|
|
|
@ -53,9 +53,7 @@ public interface ITermValueSetDao extends JpaRepository<TermValueSet, Long>, IHa
|
|||
* The current TermValueSet is not necessarily the last uploaded anymore, but the current VS resource
|
||||
* is pointed by a specific ForcedId, so we locate current ValueSet as the one pointing to current VS resource
|
||||
*/
|
||||
@Query(
|
||||
value =
|
||||
"SELECT vs FROM ForcedId f, TermValueSet vs where f.myForcedId = :forcedId and vs.myResource = f.myResource")
|
||||
@Query(value = "SELECT vs FROM TermValueSet vs where vs.myResource.myFhirId = :forcedId ")
|
||||
Optional<TermValueSet> findTermValueSetByForcedId(@Param("forcedId") String theForcedId);
|
||||
|
||||
@Query("SELECT vs FROM TermValueSet vs WHERE vs.myUrl = :url AND vs.myVersion IS NULL")
|
||||
|
|
|
@ -47,7 +47,6 @@ import ca.uhn.fhir.jpa.entity.TermConceptProperty;
|
|||
import ca.uhn.fhir.jpa.entity.TermValueSet;
|
||||
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
|
||||
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.NpmPackageEntity;
|
||||
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
|
||||
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity;
|
||||
|
@ -174,7 +173,6 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
|
|||
expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId));
|
||||
counter.addAndGet(
|
||||
expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId));
|
||||
counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class, requestPartitionId));
|
||||
counter.addAndGet(expungeEverythingByTypeWithoutPurging(
|
||||
theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId));
|
||||
counter.addAndGet(expungeEverythingByTypeWithoutPurging(
|
||||
|
|
|
@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
|||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
|
||||
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
|
||||
|
@ -46,7 +45,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
|
|||
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
|
||||
import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
|
@ -79,9 +77,6 @@ import java.util.concurrent.atomic.AtomicInteger;
|
|||
public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid> {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(JpaResourceExpungeService.class);
|
||||
|
||||
@Autowired
|
||||
private IForcedIdDao myForcedIdDao;
|
||||
|
||||
@Autowired
|
||||
private IResourceTableDao myResourceTableDao;
|
||||
|
||||
|
@ -323,11 +318,6 @@ public class JpaResourceExpungeService implements IResourceExpungeService<JpaPid
|
|||
myResourceTagDao.deleteByResourceId(resource.getId());
|
||||
}
|
||||
|
||||
if (resource.getForcedId() != null) {
|
||||
ForcedId forcedId = resource.getForcedId();
|
||||
myForcedIdDao.deleteByPid(forcedId.getId());
|
||||
}
|
||||
|
||||
myResourceTableDao.deleteByPid(resource.getId());
|
||||
} catch (DataIntegrityViolationException e) {
|
||||
throw new PreconditionFailedException(Msg.code(2415)
|
||||
|
|
|
@ -44,7 +44,6 @@ public class ResourceTableFKProvider {
|
|||
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
|
||||
|
||||
// These have the possibility of touching all resource types.
|
||||
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_IDX_CMB_TOK_NU", "RES_ID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "SRC_RESOURCE_ID"));
|
||||
|
@ -83,7 +82,6 @@ public class ResourceTableFKProvider {
|
|||
// These have the possibility of touching all resource types.
|
||||
retval.add(new ResourceForeignKey("HFJ_HISTORY_TAG", "RES_ID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_RES_VER_PROV", "RES_PID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_FORCED_ID", "RESOURCE_PID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_IDX_CMP_STRING_UNIQ", "RES_ID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_IDX_CMB_TOK_NU", "RES_ID"));
|
||||
retval.add(new ResourceForeignKey("HFJ_RES_LINK", "SRC_RESOURCE_ID"));
|
||||
|
|
|
@@ -132,6 +132,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
				.modifyColumn("20240327.1", "TARGET_CODE")
				.nullable()
				.withType(ColumnTypeEnum.STRING, 500);

		// Stop writing to hfj_forced_id https://github.com/hapifhir/hapi-fhir/pull/5817
		Builder.BuilderWithTableName forcedId = version.onTable("HFJ_FORCED_ID");
		forcedId.dropForeignKey("20240402.1", "FK_FORCEDID_RESOURCE", "HFJ_RESOURCE");
		forcedId.dropIndex("20240402.2", "IDX_FORCEDID_RESID");
		forcedId.dropIndex("20240402.3", "IDX_FORCEDID_TYPE_FID");
		forcedId.dropIndex("20240402.4", "IDX_FORCEID_FID");
	}

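For reference, a hand-written SQL approximation of what migration steps 20240402.1 through 20240402.4 do; the real DDL is generated by the HAPI migrator, and whether the unique indexes are dropped as indexes or as constraints varies by database:

-- Approximate effect of the 20240402.x steps (illustrative; not the generated DDL).
ALTER TABLE hfj_forced_id DROP CONSTRAINT fk_forcedid_resource;
DROP INDEX idx_forcedid_resid;
DROP INDEX idx_forcedid_type_fid;
DROP INDEX idx_forceid_fid;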
protected void init700() {
|
||||
|
@ -1403,9 +1410,9 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
|
|||
.addIndex("20211210.4", "FK_FORCEDID_RESOURCE")
|
||||
.unique(true)
|
||||
.withColumns("RESOURCE_PID")
|
||||
.doNothing() // This migration was added in error, as this table already has a unique constraint on
|
||||
// RESOURCE_PID and every database creates an index on anything that is unique.
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
|
||||
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS)
|
||||
.doNothing(); // This migration was added in error, as this table already has a unique constraint on
|
||||
}
|
||||
|
||||
private void init570() {
|
||||
|
|
|
@@ -1,5 +1,5 @@

-- we can't use convering index until the autovacuum runs for those rows, which kills index performance
-- we can't use covering index until the autovacuum runs for those rows, which kills index performance
ALTER TABLE hfj_resource SET (autovacuum_vacuum_scale_factor = 0.01);
ALTER TABLE hfj_forced_id SET (autovacuum_vacuum_scale_factor = 0.01);
ALTER TABLE hfj_res_link SET (autovacuum_vacuum_scale_factor = 0.01);

@ -29,7 +29,6 @@ import jakarta.persistence.Enumerated;
|
|||
import jakarta.persistence.MappedSuperclass;
|
||||
import jakarta.persistence.Temporal;
|
||||
import jakarta.persistence.TemporalType;
|
||||
import jakarta.persistence.Transient;
|
||||
import org.hibernate.annotations.OptimisticLock;
|
||||
|
||||
import java.util.Collection;
|
||||
|
@ -65,22 +64,6 @@ public abstract class BaseHasResource extends BasePartitionable
|
|||
@OptimisticLock(excluded = true)
|
||||
private Date myUpdated;
|
||||
|
||||
/**
|
||||
* This is stored as an optimization to avoid needing to query for this
|
||||
* after an update
|
||||
*/
|
||||
@Transient
|
||||
// TODO MB forced_id delete this in step 3
|
||||
private transient String myTransientForcedId;
|
||||
|
||||
public String getTransientForcedId() {
|
||||
return myTransientForcedId;
|
||||
}
|
||||
|
||||
public void setTransientForcedId(String theTransientForcedId) {
|
||||
myTransientForcedId = theTransientForcedId;
|
||||
}
|
||||
|
||||
public abstract BaseTag addTag(TagDefinition theDef);
|
||||
|
||||
@Override
|
||||
|
@ -97,13 +80,6 @@ public abstract class BaseHasResource extends BasePartitionable
|
|||
myFhirVersion = theFhirVersion;
|
||||
}
|
||||
|
||||
public abstract ForcedId getForcedId();
|
||||
|
||||
public abstract void setForcedId(ForcedId theForcedId);
|
||||
|
||||
@Override
|
||||
public abstract Long getId();
|
||||
|
||||
public void setDeleted(Date theDate) {
|
||||
myDeleted = theDate;
|
||||
}
|
||||
|
@ -129,12 +105,6 @@ public abstract class BaseHasResource extends BasePartitionable
|
|||
myPublished = thePublished.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract Long getResourceId();
|
||||
|
||||
@Override
|
||||
public abstract String getResourceType();
|
||||
|
||||
public abstract Collection<? extends BaseTag> getTags();
|
||||
|
||||
@Override
|
||||
|
@ -151,9 +121,6 @@ public abstract class BaseHasResource extends BasePartitionable
|
|||
myUpdated = theUpdated;
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract long getVersion();
|
||||
|
||||
@Override
|
||||
public boolean isHasTags() {
|
||||
return myHasTags;
|
||||
|
|
|
@ -21,51 +21,24 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ForeignKey;
|
||||
import jakarta.persistence.GeneratedValue;
|
||||
import jakarta.persistence.GenerationType;
|
||||
import jakarta.persistence.Id;
|
||||
import jakarta.persistence.Index;
|
||||
import jakarta.persistence.JoinColumn;
|
||||
import jakarta.persistence.OneToOne;
|
||||
import jakarta.persistence.SequenceGenerator;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.persistence.UniqueConstraint;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.annotations.ColumnDefault;
|
||||
|
||||
/**
|
||||
* The old way we handled client-assigned resource ids.
|
||||
* Replaced by {@link ResourceTable#myFhirId}.
|
||||
* @deprecated This is unused, and only kept for history and upgrade migration testing.
|
||||
*/
|
||||
@Entity()
|
||||
@Table(
|
||||
name = ForcedId.HFJ_FORCED_ID,
|
||||
uniqueConstraints = {
|
||||
@UniqueConstraint(
|
||||
name = "IDX_FORCEDID_RESID",
|
||||
columnNames = {"RESOURCE_PID"}),
|
||||
|
||||
/*
|
||||
* This index is called IDX_FORCEDID_TYPE_FID and guarantees
|
||||
* uniqueness of RESOURCE_TYPE,FORCED_ID. This doesn't make sense
|
||||
* for partitioned servers, so we replace it on those servers
|
||||
* with IDX_FORCEDID_TYPE_PFID covering
|
||||
* PARTITION_ID,RESOURCE_TYPE,FORCED_ID
|
||||
*/
|
||||
@UniqueConstraint(
|
||||
name = ForcedId.IDX_FORCEDID_TYPE_FID,
|
||||
columnNames = {"RESOURCE_TYPE", "FORCED_ID"})
|
||||
},
|
||||
indexes = {
|
||||
/*
|
||||
* NB: We previously had indexes named
|
||||
* - IDX_FORCEDID_TYPE_FORCEDID
|
||||
* - IDX_FORCEDID_TYPE_RESID
|
||||
* so don't reuse these names
|
||||
*/
|
||||
@Index(name = "IDX_FORCEID_FID", columnList = "FORCED_ID"),
|
||||
// @Index(name = "IDX_FORCEID_RESID", columnList = "RESOURCE_PID"),
|
||||
})
|
||||
public class ForcedId extends BasePartitionable {
|
||||
@Table(name = ForcedId.HFJ_FORCED_ID)
|
||||
@Deprecated(since = "7.1", forRemoval = true)
|
||||
class ForcedId extends BasePartitionable {
|
||||
|
||||
public static final int MAX_FORCED_ID_LENGTH = 100;
|
||||
public static final String IDX_FORCEDID_TYPE_FID = "IDX_FORCEDID_TYPE_FID";
|
||||
|
@ -80,14 +53,6 @@ public class ForcedId extends BasePartitionable {
|
|||
@Column(name = "PID")
|
||||
private Long myId;
|
||||
|
||||
@JoinColumn(
|
||||
name = "RESOURCE_PID",
|
||||
nullable = false,
|
||||
updatable = false,
|
||||
foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
|
||||
@OneToOne(fetch = FetchType.LAZY)
|
||||
private ResourceTable myResource;
|
||||
|
||||
@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)
|
||||
private Long myResourcePid;
|
||||
|
||||
|
@ -112,10 +77,6 @@ public class ForcedId extends BasePartitionable {
|
|||
myForcedId = theForcedId;
|
||||
}
|
||||
|
||||
public void setResource(ResourceTable theResource) {
|
||||
myResource = theResource;
|
||||
}
|
||||
|
||||
public String getResourceType() {
|
||||
return myResourceType;
|
||||
}
|
||||
|
|
|
@ -22,7 +22,26 @@ package ca.uhn.fhir.jpa.model.entity;
|
|||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import jakarta.persistence.*;
|
||||
import jakarta.persistence.CascadeType;
|
||||
import jakarta.persistence.Column;
|
||||
import jakarta.persistence.Entity;
|
||||
import jakarta.persistence.EnumType;
|
||||
import jakarta.persistence.Enumerated;
|
||||
import jakarta.persistence.FetchType;
|
||||
import jakarta.persistence.ForeignKey;
|
||||
import jakarta.persistence.GeneratedValue;
|
||||
import jakarta.persistence.GenerationType;
|
||||
import jakarta.persistence.Id;
|
||||
import jakarta.persistence.Index;
|
||||
import jakarta.persistence.JoinColumn;
|
||||
import jakarta.persistence.Lob;
|
||||
import jakarta.persistence.ManyToOne;
|
||||
import jakarta.persistence.OneToMany;
|
||||
import jakarta.persistence.OneToOne;
|
||||
import jakarta.persistence.SequenceGenerator;
|
||||
import jakarta.persistence.Table;
|
||||
import jakarta.persistence.Transient;
|
||||
import jakarta.persistence.UniqueConstraint;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hibernate.Length;
|
||||
|
@ -111,6 +130,12 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
|
|||
|
||||
@Transient
|
||||
private transient ResourceHistoryProvenanceEntity myNewHistoryProvenanceEntity;
|
||||
/**
|
||||
* This is stored as an optimization to avoid needing to fetch ResourceTable
|
||||
* to access the resource id.
|
||||
*/
|
||||
@Transient
|
||||
private transient String myTransientForcedId;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
|
@ -280,16 +305,6 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
|
|||
return new IdDt(getResourceType() + '/' + resourceIdPart + '/' + Constants.PARAM_HISTORY + '/' + getVersion());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ForcedId getForcedId() {
|
||||
return getResourceTable().getForcedId();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setForcedId(ForcedId theForcedId) {
|
||||
getResourceTable().setForcedId(theForcedId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if there is a populated resource text (i.e.
|
||||
* either {@link #getResource()} or {@link #getResourceTextVc()} return a non null
|
||||
|
@ -311,4 +326,12 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
|
|||
}
|
||||
return myNewHistoryProvenanceEntity;
|
||||
}
|
||||
|
||||
public String getTransientForcedId() {
|
||||
return myTransientForcedId;
|
||||
}
|
||||
|
||||
public void setTransientForcedId(String theTransientForcedId) {
|
||||
myTransientForcedId = theTransientForcedId;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,7 +38,6 @@ import jakarta.persistence.Id;
|
|||
import jakarta.persistence.Index;
|
||||
import jakarta.persistence.NamedEntityGraph;
|
||||
import jakarta.persistence.OneToMany;
|
||||
import jakarta.persistence.OneToOne;
|
||||
import jakarta.persistence.PostPersist;
|
||||
import jakarta.persistence.PrePersist;
|
||||
import jakarta.persistence.PreUpdate;
|
||||
|
@ -420,15 +419,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
@Transient
|
||||
private transient boolean myVersionUpdatedInCurrentTransaction;
|
||||
|
||||
@OneToOne(
|
||||
optional = true,
|
||||
fetch = FetchType.EAGER,
|
||||
cascade = {},
|
||||
orphanRemoval = false,
|
||||
mappedBy = "myResource")
|
||||
@OptimisticLock(excluded = true)
|
||||
private ForcedId myForcedId;
|
||||
|
||||
@Transient
|
||||
private volatile String myCreatedByMatchUrl;
|
||||
|
||||
|
@ -889,10 +879,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
|
||||
retVal.setResourceId(myId);
|
||||
retVal.setResourceType(myResourceType);
|
||||
retVal.setTransientForcedId(getTransientForcedId());
|
||||
retVal.setTransientForcedId(getFhirId());
|
||||
retVal.setFhirVersion(getFhirVersion());
|
||||
retVal.setResourceTable(this);
|
||||
retVal.setForcedId(getForcedId());
|
||||
retVal.setPartitionId(getPartitionId());
|
||||
|
||||
retVal.setHasTags(isHasTags());
|
||||
|
@ -923,6 +912,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
public String toString() {
|
||||
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
|
||||
b.append("pid", myId);
|
||||
b.append("fhirId", myFhirId);
|
||||
b.append("resourceType", myResourceType);
|
||||
b.append("version", myVersion);
|
||||
if (getPartitionId() != null) {
|
||||
|
@ -970,16 +960,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
return JpaPid.fromId(getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ForcedId getForcedId() {
|
||||
return myForcedId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setForcedId(ForcedId theForcedId) {
|
||||
myForcedId = theForcedId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IdDt getIdDt() {
|
||||
IdDt retVal = new IdDt();
|
||||
|
@ -997,10 +977,6 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
|
|||
String resourceId;
|
||||
if (myFhirId != null && !myFhirId.isEmpty()) {
|
||||
resourceId = myFhirId;
|
||||
} else if (getTransientForcedId() != null) {
|
||||
resourceId = getTransientForcedId();
|
||||
} else if (myForcedId != null) {
|
||||
resourceId = myForcedId.getForcedId();
|
||||
} else {
|
||||
Long id = this.getResourceId();
|
||||
resourceId = Long.toString(id);
|
||||
|
|
|
@ -25,17 +25,15 @@ public class ResourceTableTest {
|
|||
|
||||
@ParameterizedTest
|
||||
@CsvSource(value={
|
||||
"123, null, Patient/123/_history/1",
|
||||
"123, 123, Patient/123/_history/1",
|
||||
", 123, Patient/123/_history/1",
|
||||
"null, 456, Patient/456/_history/1"
|
||||
"123, 456, Patient/456/_history/1"
|
||||
},nullValues={"null"})
|
||||
public void testPopulateId(String theFhirId, String theForcedId, String theExpected) {
|
||||
public void testPopulateId(Long theResId, String theFhirId, String theExpected) {
|
||||
// Given
|
||||
ResourceTable t = new ResourceTable();
|
||||
t.setId(theResId);
|
||||
t.setFhirId(theFhirId);
|
||||
ForcedId forcedId = new ForcedId();
|
||||
forcedId.setForcedId(theForcedId);
|
||||
t.setForcedId(forcedId);
|
||||
t.setResourceType(new Patient().getResourceType().name());
|
||||
t.setVersionForUnitTest(1);
|
||||
|
||||
|
|
|
@ -6,7 +6,6 @@ import ca.uhn.fhir.jpa.api.model.HistoryCountModeEnum;
|
|||
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
|
||||
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
|
||||
import ca.uhn.fhir.jpa.dao.DaoTestUtils;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
|
||||
|
@ -82,7 +81,6 @@ import org.junit.jupiter.api.AfterEach;
|
|||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -112,8 +110,6 @@ import static org.junit.jupiter.api.Assertions.fail;
|
|||
public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
|
||||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2Test.class);
|
||||
@Autowired
|
||||
private IForcedIdDao myForcedIdDao;
|
||||
|
||||
@AfterEach
|
||||
public final void after() {
|
||||
|
@ -995,9 +991,7 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
|
|||
idv2 = myPatientDao.update(patient, mySrd).getId();
|
||||
}
|
||||
|
||||
runInTransaction(() -> {
|
||||
ourLog.info("Forced IDs:\n{}", myForcedIdDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n")));
|
||||
});
|
||||
logAllResources();
|
||||
|
||||
List<Patient> patients = toList(myPatientDao.history(idv1.toVersionless(), null, null, null, mySrd));
|
||||
assertTrue(patients.size() == 2);
|
||||
|
|
|
@ -626,16 +626,9 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
|
|||
.getSingleResult();
|
||||
assertNotNull(readBackView, "found search view");
|
||||
|
||||
// verify the forced id join still works
|
||||
if (readBackResource.getForcedId() != null) {
|
||||
assertEquals(myExpectedId, readBackResource.getForcedId().getForcedId(),
|
||||
"legacy join populated");
|
||||
assertEquals(myExpectedId, readBackView.getFhirId(),
|
||||
"legacy join populated");
|
||||
} else {
|
||||
assertEquals(IdStrategyEnum.SEQUENTIAL_NUMERIC, theServerIdStrategy,
|
||||
"hfj_forced_id join column is only empty when using server-assigned ids");
|
||||
} }
|
||||
"fhir_id populated");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -190,7 +190,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
|
|||
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, false),
|
||||
either(containsString("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null"))
|
||||
.or(containsString("rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='A' and rt1_0.PARTITION_ID is null or rt1_0.RES_TYPE='Patient' and rt1_0.FHIR_ID='B' and rt1_0.PARTITION_ID is null")));
|
||||
assertEquals(52, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(50, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
assertEquals(1, myCaptureQueriesListener.countCommits());
|
||||
|
|
|
@ -19,7 +19,6 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
|||
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
|
||||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.search.ResourceSearchUrlSvc;
|
||||
|
@ -227,7 +226,8 @@ class BaseHapiFhirResourceDaoTest {
|
|||
RequestPartitionId partitionId = Mockito.mock(RequestPartitionId.class);
|
||||
JpaPid jpaPid = JpaPid.fromIdAndVersion(123L, 1L);
|
||||
ResourceTable entity = new ResourceTable();
|
||||
entity.setForcedId(new ForcedId());
|
||||
entity.setId(123L);
|
||||
entity.setFhirId("456");
|
||||
|
||||
// mock
|
||||
when(myRequestPartitionHelperSvc.determineReadPartitionForRequestForRead(
|
||||
|
|
|
@ -7,7 +7,6 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
|||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.entity.PartitionEntity;
|
||||
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
|
||||
|
@ -30,6 +29,7 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.entity.ResourceTable.IDX_RES_TYPE_FHIR_ID;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
@ -61,13 +61,6 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
|
|||
|
||||
mySrdInterceptorService.unregisterInterceptorsIf(t -> t instanceof MyReadWriteInterceptor);
|
||||
|
||||
if (myHaveDroppedForcedIdUniqueConstraint) {
|
||||
runInTransaction(() -> {
|
||||
myEntityManager.createNativeQuery("delete from HFJ_FORCED_ID").executeUpdate();
|
||||
myEntityManager.createNativeQuery("alter table HFJ_FORCED_ID add constraint IDX_FORCEDID_TYPE_FID unique (RESOURCE_TYPE, FORCED_ID)");
|
||||
});
|
||||
}
|
||||
|
||||
myStorageSettings.setIndexMissingFields(new JpaStorageSettings().getIndexMissingFields());
|
||||
myStorageSettings.setAutoCreatePlaceholderReferenceTargets(new JpaStorageSettings().isAutoCreatePlaceholderReferenceTargets());
|
||||
myStorageSettings.setMassIngestionMode(new JpaStorageSettings().isMassIngestionMode());
|
||||
|
@ -106,6 +99,18 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterPurgeDatabase() {
|
||||
super.afterPurgeDatabase();
|
||||
|
||||
if (myHaveDroppedForcedIdUniqueConstraint) {
|
||||
runInTransaction(() -> {
|
||||
myEntityManager.createNativeQuery("delete from HFJ_RESOURCE").executeUpdate();
|
||||
myEntityManager.createNativeQuery("alter table " + ResourceTable.HFJ_RESOURCE +
|
||||
" add constraint " + IDX_RES_TYPE_FHIR_ID + " unique (RES_TYPE, FHIR_ID)").executeUpdate();
|
||||
});
|
||||
}
|
||||
}
|
||||
protected void createUniqueCompositeSp() {
|
||||
addCreateDefaultPartition();
|
||||
addReadDefaultPartition(); // one for search param validation
|
||||
|
@ -137,8 +142,7 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
|
|||
|
||||
protected void dropForcedIdUniqueConstraint() {
|
||||
runInTransaction(() -> {
|
||||
myEntityManager.createNativeQuery("alter table " + ForcedId.HFJ_FORCED_ID + " drop constraint " + ForcedId.IDX_FORCEDID_TYPE_FID).executeUpdate();
|
||||
myEntityManager.createNativeQuery("alter table " + ResourceTable.HFJ_RESOURCE + " drop constraint " + ResourceTable.IDX_RES_TYPE_FHIR_ID).executeUpdate();
|
||||
myEntityManager.createNativeQuery("alter table " + ResourceTable.HFJ_RESOURCE + " drop constraint " + IDX_RES_TYPE_FHIR_ID).executeUpdate();
|
||||
});
|
||||
myHaveDroppedForcedIdUniqueConstraint = true;
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@ import ca.uhn.fhir.jpa.dao.data.ISearchParamPresentDao;
|
|||
import ca.uhn.fhir.jpa.entity.TermValueSet;
|
||||
import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum;
|
||||
import ca.uhn.fhir.jpa.interceptor.ForceOffsetSearchModeInterceptor;
|
||||
import ca.uhn.fhir.jpa.model.entity.ForcedId;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
|
||||
|
@ -47,6 +46,7 @@ import ca.uhn.fhir.test.utilities.ProxyUtil;
|
|||
import ca.uhn.fhir.test.utilities.server.HashMapResourceProviderExtension;
|
||||
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
|
||||
import ca.uhn.fhir.util.BundleBuilder;
|
||||
import jakarta.annotation.Nonnull;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
@ -90,7 +90,6 @@ import org.springframework.data.domain.PageRequest;
|
|||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.util.comparator.ComparableComparator;
|
||||
|
||||
import jakarta.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
|
@ -221,7 +220,6 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
|
||||
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), not(empty())));
|
||||
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
|
||||
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), not(empty())));
|
||||
|
||||
logAllResources();
|
||||
|
||||
|
@ -242,11 +240,10 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
assertEquals(47, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
|
||||
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
|
||||
assertEquals(85, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
assertEquals(80, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
|
||||
|
||||
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
|
||||
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
|
||||
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));
|
||||
|
||||
}
|
||||
|
||||
|
@ -328,7 +325,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
Group group = createGroup(patientList.subList(0, initialPatientsCount));
|
||||
|
||||
assertQueryCount(31, 0, 4, 0);
|
||||
assertQueryCount(31, 0, 3, 0);
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
group = updateGroup(group, patientList.subList(initialPatientsCount, allPatientsCount));
|
||||
|
@ -350,7 +347,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.clear();
|
||||
Group group = createGroup(patientList);
|
||||
|
||||
assertQueryCount(31, 0, 4, 0);
|
||||
assertQueryCount(31, 0, 3, 0);
|
||||
|
||||
// Make a change to the group, but don't touch any references in it
|
||||
myCaptureQueriesListener.clear();
|
||||
|
@ -672,7 +669,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
|
||||
|
@ -705,24 +702,20 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
|
||||
runInTransaction(() -> {
|
||||
List<ForcedId> allForcedIds = myForcedIdDao.findAll();
|
||||
for (ForcedId next : allForcedIds) {
|
||||
assertNotNull(next.getResourceId());
|
||||
assertNotNull(next.getForcedId());
|
||||
}
|
||||
|
||||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
String versions = "Resource Versions:\n * " + resources.stream().map(t -> "Resource " + t.getIdDt() + " has version: " + t.getVersion()).collect(Collectors.joining("\n * "));
|
||||
|
||||
for (ResourceTable next : resources) {
|
||||
assertEquals(1, next.getVersion(), versions);
|
||||
assertNotNull(next.getResourceId());
|
||||
assertNotNull(next.getFhirId());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -771,22 +764,18 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
|
||||
|
||||
runInTransaction(() -> {
|
||||
List<ForcedId> allForcedIds = myForcedIdDao.findAll();
|
||||
for (ForcedId next : allForcedIds) {
|
||||
assertNotNull(next.getResourceId());
|
||||
assertNotNull(next.getForcedId());
|
||||
}
|
||||
|
||||
List<ResourceTable> resources = myResourceTableDao.findAll();
|
||||
String versions = "Resource Versions:\n * " + resources.stream().map(t -> "Resource " + t.getIdDt() + " has version: " + t.getVersion()).collect(Collectors.joining("\n * "));
|
||||
|
||||
for (ResourceTable next : resources) {
|
||||
assertEquals(1, next.getVersion(), versions);
|
||||
assertNotNull(next.getResourceId());
|
||||
assertNotNull(next.getFhirId());
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -819,7 +808,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
|
|||
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
|
||||
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
|
||||
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
|
||||
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
|
assertEquals(3, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@@ -856,7 +845,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
withFamily("Family" + i),
withTag("http://foo", "blah"));
}
List<TypedPidJson> pids = runInTransaction(() -> myForcedIdDao
List<TypedPidJson> pids = runInTransaction(() -> myResourceTableDao
.findAll()
.stream()
.map(t -> new TypedPidJson(t.getResourceType(), Long.toString(t.getResourceId())))

@@ -874,7 +863,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(29, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(28, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(10, outcome.getRecordsProcessed());
runInTransaction(()-> assertEquals(0, myResourceTableDao.count()));
}

@@ -1839,7 +1828,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueries();
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries();
assertEquals(21, myCaptureQueriesListener.countInsertQueries());
assertEquals(18, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

@@ -1925,7 +1914,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
// Search for IDs and Search for tag definition
assertEquals(3, myCaptureQueriesListener.countSelectQueries());
myCaptureQueriesListener.logInsertQueries();
assertEquals(29, myCaptureQueriesListener.countInsertQueries());
assertEquals(26, myCaptureQueriesListener.countInsertQueries());
myCaptureQueriesListener.logUpdateQueries();
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

@@ -2453,7 +2442,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(7, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

@@ -3000,7 +2989,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.logSelectQueries();
assertEquals(17, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6607, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6189, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(418, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countCommits());

@@ -3368,7 +3357,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
myCaptureQueriesListener.clear();
mySystemDao.transaction(new SystemRequestDetails(), supplier.get());
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(30, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(29, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

@@ -3451,7 +3440,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseResourceProviderR4Test
mySystemDao.transaction(new SystemRequestDetails(), loadResourceFromClasspath(Bundle.class, "r4/transaction-perf-bundle.json"));
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(125, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(120, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@@ -31,9 +31,7 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.test.utilities.ProxyUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.checkerframework.checker.units.qual.A;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.BodyStructure;
import org.hl7.fhir.r4.model.CodeableConcept;

@@ -1238,12 +1236,11 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
myPatientDao.update(p).getId().toUnqualifiedVersionless();
assertEquals(1, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(1, myResourceHistoryTableDao.count());
assertEquals(1, myForcedIdDao.count());
assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
});

@@ -1264,7 +1261,6 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
runInTransaction(() -> {
assertEquals(1, myResourceTableDao.count());
assertEquals(2, myResourceHistoryTableDao.count());
assertEquals(1, myForcedIdDao.count());
assertEquals(1, myResourceIndexedSearchParamTokenDao.count());
});
@@ -15,7 +15,6 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;

@@ -589,13 +588,13 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.update(p, mySrd);
runInTransaction(() -> {
// HFJ_FORCED_ID
List<ForcedId> forcedIds = myForcedIdDao.findAll();
assertEquals(2, forcedIds.size());
assertEquals(myPartitionId, forcedIds.get(0).getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, forcedIds.get(0).getPartitionId().getPartitionDate());
assertEquals(myPartitionId, forcedIds.get(1).getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, forcedIds.get(1).getPartitionId().getPartitionDate());
ResourceTable orgResourceTable = myResourceTableDao.findByTypeAndFhirId("Organization", "org").orElseThrow(IllegalArgumentException::new);
assertEquals(myPartitionId, orgResourceTable.getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, orgResourceTable.getPartitionId().getPartitionDate());
ResourceTable patientResourceTable = myResourceTableDao.findByTypeAndFhirId("Patient", "pat").orElseThrow(IllegalArgumentException::new);
assertEquals(myPartitionId, patientResourceTable.getPartitionId().getPartitionId().intValue());
assertLocalDateFromDbMatches(myPartitionDate, patientResourceTable.getPartitionId().getPartitionDate());
});
}

@@ -615,11 +614,11 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.update(p, mySrd);
runInTransaction(() -> {
// HFJ_FORCED_ID
List<ForcedId> forcedIds = myForcedIdDao.findAll();
assertEquals(2, forcedIds.size());
assertEquals(null, forcedIds.get(0).getPartitionId());
assertEquals(null, forcedIds.get(1).getPartitionId());
ResourceTable orgResourceTable = myResourceTableDao.findByTypeAndFhirId("Organization", "org").orElseThrow(IllegalArgumentException::new);
assertNull(orgResourceTable.getPartitionId());
ResourceTable patientResourceTable = myResourceTableDao.findByTypeAndFhirId("Patient", "pat").orElseThrow(IllegalArgumentException::new);
assertNull(patientResourceTable.getPartitionId());
});
}
@@ -639,13 +638,13 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myPatientDao.update(p, mySrd);
runInTransaction(() -> {
// HFJ_FORCED_ID
List<ForcedId> forcedIds = myForcedIdDao.findAll();
assertEquals(2, forcedIds.size());
assertEquals(null, forcedIds.get(0).getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, forcedIds.get(0).getPartitionId().getPartitionDate());
assertEquals(null, forcedIds.get(1).getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, forcedIds.get(1).getPartitionId().getPartitionDate());
ResourceTable orgResourceTable = myResourceTableDao.findByTypeAndFhirId("Organization", "org").orElseThrow(IllegalArgumentException::new);
assertNull(orgResourceTable.getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, orgResourceTable.getPartitionId().getPartitionDate());
ResourceTable patientResourceTable = myResourceTableDao.findByTypeAndFhirId("Patient", "pat").orElseThrow(IllegalArgumentException::new);
assertNull(patientResourceTable.getPartitionId().getPartitionId());
assertLocalDateFromDbMatches(myPartitionDate, patientResourceTable.getPartitionId().getPartitionDate());
});
}

@@ -876,8 +875,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
}
{
addReadAllPartitions();

@@ -888,8 +887,8 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
}
}
@@ -910,7 +909,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID='1'"), searchSql);
}

@@ -954,7 +953,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql);
}

@@ -967,7 +966,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql);
}

@@ -1008,7 +1007,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID in ("), searchSql);
}

@@ -1022,7 +1021,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Only the read columns should be used, but no selectors on partition ID
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"), searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"), searchSql);
}

@@ -1064,7 +1063,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("Search SQL:\n{}", searchSql);
// Only the read columns should be used, no criteria use partition
assertEquals(2, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID,"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"));
}

@@ -2899,7 +2898,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
myCaptureQueriesListener.logSelectQueries();
assertEquals(18, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(6607, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(6189, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(418, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countCommits());
@@ -27,8 +27,6 @@ import ca.uhn.fhir.util.MultimapCollector;
import com.google.common.base.Charsets;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimap;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;

@@ -47,7 +45,6 @@ import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;

import java.io.IOException;
import java.util.List;

@@ -64,8 +61,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;

public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Test {
public static final int ALTERNATE_DEFAULT_ID = -1;

@@ -355,7 +350,6 @@ public class PatientIdPartitionInterceptorTest extends BaseResourceProviderR4Tes
org.setName("name 2");

logAllResources();
logAllForcedIds();

myOrganizationDao.update(org);
@@ -18,16 +18,15 @@ import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.model.util.UcumServiceUtil;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.search.PersistedJpaSearchFirstPageBundleProvider;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;

@@ -379,7 +378,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), not(empty())));

myPatientDao.expunge(new ExpungeOptions()
.setExpungeDeletedResources(true)

@@ -387,7 +385,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));
}

@@ -409,7 +406,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), not(empty())));

// Test
myCaptureQueriesListener.clear();
@@ -421,11 +417,10 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(9, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
assertEquals(8, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));
}

@@ -749,7 +744,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
.setExpungeOldVersions(true), null);
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));

// Create again with the same forced ID
p = new Patient();

@@ -788,7 +782,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
.setExpungeOldVersions(true), null);
runInTransaction(() -> assertThat(myResourceTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myResourceHistoryTableDao.findAll(), empty()));
runInTransaction(() -> assertThat(myForcedIdDao.findAll(), empty()));
}

@@ -1061,6 +1054,5 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertThat(myTermConceptDao.findAll(), empty());
assertThat(myResourceTableDao.findAll(), empty());
assertThat(myResourceHistoryTableDao.findAll(), empty());
assertThat(myForcedIdDao.findAll(), empty());
}
}
@@ -18,7 +18,6 @@ import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboStringUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamDateDao;

@@ -64,6 +63,7 @@ import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import jakarta.persistence.EntityManager;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -124,7 +124,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;

import jakarta.persistence.EntityManager;
import java.io.IOException;
import java.util.List;
import java.util.Optional;

@@ -280,8 +279,6 @@ public abstract class BaseJpaR4BTest extends BaseJpaTest implements ITestDataBui
@Autowired
protected IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IForcedIdDao myForcedIdDao;
@Autowired
@Qualifier("myCoverageDaoR4B")
protected IFhirResourceDao<Coverage> myCoverageDao;
@Autowired
@@ -19,7 +19,6 @@ import ca.uhn.fhir.jpa.binary.interceptor.BinaryStorageInterceptor;
import ca.uhn.fhir.jpa.binary.provider.BinaryAccessProvider;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportJobSchedulingHelper;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;

@@ -72,6 +71,7 @@ import ca.uhn.fhir.rest.server.BasePagingProvider;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import jakarta.persistence.EntityManager;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.AllergyIntolerance;

@@ -134,7 +134,6 @@ import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;

import jakarta.persistence.EntityManager;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

@@ -303,8 +302,6 @@ public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuil
@Autowired
protected IResourceHistoryProvenanceDao myResourceHistoryProvenanceDao;
@Autowired
protected IForcedIdDao myForcedIdDao;
@Autowired
@Qualifier("myCoverageDaoR5")
protected IFhirResourceDao<Coverage> myCoverageDao;
@Autowired
@@ -20,8 +20,6 @@
package ca.uhn.fhir.jpa.test;

import ca.uhn.fhir.batch2.jobs.export.BulkDataExportProvider;
import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.interceptor.api.IInterceptorService;

@@ -45,7 +43,6 @@ import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.TestDaoSearch;
import ca.uhn.fhir.jpa.dao.data.IBatch2JobInstanceRepository;
import ca.uhn.fhir.jpa.dao.data.IBatch2WorkChunkRepository;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IMdmLinkJpaRepository;
import ca.uhn.fhir.jpa.dao.data.IPartitionDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryProvenanceDao;

@@ -116,6 +113,7 @@ import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import jakarta.persistence.EntityManager;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -208,7 +206,6 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.util.AopTestUtils;
import org.springframework.transaction.PlatformTransactionManager;

import jakarta.persistence.EntityManager;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -36,7 +36,6 @@ import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedComboTokensNonUniqueDao;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamCoordsDao;

@@ -61,7 +60,6 @@ import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedComboTokenNonUnique;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;

@@ -99,6 +97,8 @@ import ca.uhn.fhir.util.ClasspathUtil;
import ca.uhn.fhir.util.FhirVersionIndependentConcept;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.TestUtil;
import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

@@ -129,8 +129,6 @@ import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

import jakarta.annotation.Nonnull;
import jakarta.persistence.EntityManager;
import java.io.IOException;
import java.time.Duration;
import java.time.temporal.ChronoUnit;

@@ -257,8 +255,6 @@ public abstract class BaseJpaTest extends BaseTest {
@Autowired
private IResourceHistoryTableDao myResourceHistoryTableDao;
@Autowired
protected IForcedIdDao myForcedIdDao;
@Autowired
private DaoRegistry myDaoRegistry;
private final List<Object> myRegisteredInterceptors = new ArrayList<>(1);

@@ -517,14 +513,6 @@ public abstract class BaseJpaTest extends BaseTest {
});
}

protected int logAllForcedIds() {
return runInTransaction(() -> {
List<ForcedId> forcedIds = myForcedIdDao.findAll();
ourLog.info("Resources:\n * {}", forcedIds.stream().map(ForcedId::toString).collect(Collectors.joining("\n * ")));
return forcedIds.size();
});
}

protected void logAllDateIndexes() {
runInTransaction(() -> {
ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(ResourceIndexedSearchParamDate::toString).collect(Collectors.joining("\n * ")));
@@ -3,16 +3,17 @@ package ca.uhn.fhir.jpa.cache;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;

@@ -23,11 +24,6 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;

import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;

@@ -36,7 +32,6 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;

@@ -85,8 +80,6 @@ public class ResourceVersionSvcTest {
CriteriaQuery<ResourceTable> criteriaQuery = Mockito.mock(CriteriaQuery.class);
Root<ResourceTable> from = Mockito.mock(Root.class);
Path path = Mockito.mock(Path.class);

TypedQuery<ForcedId> queryMock = Mockito.mock(TypedQuery.class);
}

/**
@@ -24,9 +24,9 @@ public class SchemaInitializationProviderTest {
-- comment in a weird spot
references CDR_XACT_LOG;

-- we can't use convering index until the autovacuum runs for those rows, which kills index performance
-- we can't use covering index until the autovacuum runs for those rows, which kills index performance
ALTER TABLE hfj_resource SET (autovacuum_vacuum_scale_factor = 0.01);
ALTER TABLE hfj_forced_id SET (autovacuum_vacuum_scale_factor = 0.01);
ALTER TABLE hfj_spidx_token SET (autovacuum_vacuum_scale_factor = 0.01);
""";
List<String> listToPopulate = new ArrayList<>();
svc.parseSqlFileIntoIndividualStatements(DriverTypeEnum.POSTGRES_9_4, listToPopulate, input);

@@ -35,7 +35,7 @@ public class SchemaInitializationProviderTest {
"create sequence foo",
"alter table if exists CDR_XACT_LOG_STEP add constraint FK_XACTLOGSTEP_XACTLOG foreign key (LOG_PID) references CDR_XACT_LOG",
"ALTER TABLE hfj_resource SET (autovacuum_vacuum_scale_factor = 0.01)",
"ALTER TABLE hfj_forced_id SET (autovacuum_vacuum_scale_factor = 0.01)"
"ALTER TABLE hfj_spidx_token SET (autovacuum_vacuum_scale_factor = 0.01)"
));
}