extends B
@Override
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
- IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
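+		// Resolve the request's target partition up front so the history query below runs in a transaction bound to that partition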
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null);
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
+ IBundleProvider retVal = myTransactionService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(requestPartitionId)
+ .execute(() -> myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset, requestPartitionId));
ourLog.info("Processed global history in {}ms", w.getMillisAndRestart());
return retVal;
}
@Override
- @Transactional(propagation = Propagation.NEVER)
public T transaction(RequestDetails theRequestDetails, T theRequest) {
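+		// Replaces the removed @Transactional(propagation = NEVER) annotation: fail fast if a transaction is already active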
+ HapiTransactionService.noTransactionAllowed();
return myTransactionProcessor.transaction(theRequestDetails, theRequest, false);
}
@Override
- @Transactional(propagation = Propagation.MANDATORY)
public T transactionNested(RequestDetails theRequestDetails, T theRequest) {
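+		// Replaces the removed @Transactional(propagation = MANDATORY) annotation: an active transaction must already be present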
+ HapiTransactionService.requireTransaction();
return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
}
@Override
- @Transactional(propagation = Propagation.MANDATORY)
	public <P extends IResourcePersistentId> void preFetchResources(List<P> theResolvedIds) {
+ HapiTransactionService.requireTransaction();
		List<Long> pids = theResolvedIds
.stream()
.map(t -> ((JpaPid) t).getId())
.collect(Collectors.toList());
-		new QueryChunker<Long>().chunk(pids, ids->{
+		new QueryChunker<Long>().chunk(pids, ids -> {
/*
* Pre-fetch the resources we're touching in this transaction in mass - this reduced the
@@ -182,32 +195,32 @@ public abstract class BaseHapiFhirSystemDao extends B
			List<Long> entityIds;
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "string", "myParamsString", null);
}
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "token", "myParamsToken", null);
}
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "date", "myParamsDate", null);
}
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null);
}
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null);
}
- entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t->t.getId()).collect(Collectors.toList());
+ entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t -> t.getId()).collect(Collectors.toList());
if (entityIds.size() > 0) {
myResourceTagDao.findByResourceIds(entityIds);
preFetchIndexes(entityIds, "tags", "myTags", null);
@@ -255,7 +268,7 @@ public abstract class BaseHapiFhirSystemDao extends B
}
	private void preFetchIndexes(List<Long> theIds, String typeDesc, String fieldName, @Nullable List<ResourceTable> theEntityListToPopulate) {
-		new QueryChunker<Long>().chunk(theIds, ids->{
+		new QueryChunker<Long>().chunk(theIds, ids -> {
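+			// JOIN FETCH the requested index collection so it is loaded into the persistence context up front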
			TypedQuery<ResourceTable> query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class);
query.setParameter("IDS", ids);
			List<ResourceTable> indexFetchOutcome = query.getResultList();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
index 16e524d7ced..6f004aafe94 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/JpaStorageResourceParser.java
@@ -29,6 +29,8 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
+import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
@@ -87,6 +89,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
private PartitionSettings myPartitionSettings;
@Autowired
private IPartitionLookupSvc myPartitionLookupSvc;
+ @Autowired
+ private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry;
@Override
public IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation) {
@@ -231,18 +235,29 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
}
@SuppressWarnings("unchecked")
-	private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum resourceEncoding, String decodedResourceText, Class<R> resourceType) {
+	private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum theResourceEncoding, String theDecodedResourceText, Class<R> theResourceType) {
R retVal;
- if (resourceEncoding != ResourceEncodingEnum.DEL) {
+ if (theResourceEncoding == ResourceEncodingEnum.ESR) {
+
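+			// The encoded ESR text has the form "<providerId>:<address>"; everything before the first colon selects the provider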
+ int colonIndex = theDecodedResourceText.indexOf(':');
+ Validate.isTrue(colonIndex > 0, "Invalid ESR address: %s", theDecodedResourceText);
+ String providerId = theDecodedResourceText.substring(0, colonIndex);
+ String address = theDecodedResourceText.substring(colonIndex + 1);
+ Validate.notBlank(providerId, "No provider ID in ESR address: %s", theDecodedResourceText);
+ Validate.notBlank(address, "No address in ESR address: %s", theDecodedResourceText);
+ IExternallyStoredResourceService provider = myExternallyStoredResourceServiceRegistry.getProvider(providerId);
+ retVal = (R) provider.fetchResource(address);
+
+ } else if (theResourceEncoding != ResourceEncodingEnum.DEL) {
IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());
try {
- retVal = parser.parseResource(resourceType, decodedResourceText);
+ retVal = parser.parseResource(theResourceType, theDecodedResourceText);
} catch (Exception e) {
StringBuilder b = new StringBuilder();
b.append("Failed to parse database resource[");
- b.append(myFhirContext.getResourceType(resourceType));
+ b.append(myFhirContext.getResourceType(theResourceType));
b.append("/");
b.append(theEntity.getIdDt().getIdPart());
b.append(" (pid ");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
index 5a174360f57..76af53d0f4d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/expunge/ExpungeEverythingService.java
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
@@ -69,8 +71,10 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.StopWatch;
@@ -96,12 +100,13 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
protected EntityManager myEntityManager;
@Autowired
+ protected IInterceptorBroadcaster myInterceptorBroadcaster;
+ @Autowired
private HapiTransactionService myTxService;
- @Autowired
- protected IInterceptorBroadcaster myInterceptorBroadcaster;
-
@Autowired
private MemoryCacheService myMemoryCacheService;
+ @Autowired
+ private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
private int deletedResourceEntityCount;
@@ -118,67 +123,71 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks);
ourLog.info("BEGINNING GLOBAL $expunge");
- myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+ Propagation propagation = Propagation.REQUIRES_NEW;
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE);
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details);
+
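+		// Each step of the purge below runs in its own REQUIRES_NEW transaction scoped to the resolved partition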
+ myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
});
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class));
- myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class, requestPartitionId));
+ myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
for (TermCodeSystem next : myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) {
next.setCurrentVersion(null);
myEntityManager.merge(next);
}
});
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId));
int counterBefore = counter.get();
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class));
- counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class, requestPartitionId));
+ counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class, requestPartitionId));
deletedResourceEntityCount = counter.get() - counterBefore;
- myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+ myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d"));
});
- purgeAllCaches();
+ purgeAllCaches();
ourLog.info("COMPLETED GLOBAL $expunge - Deleted {} rows", counter.get());
}
@@ -189,42 +198,42 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
}
private void purgeAllCaches() {
- myMemoryCacheService.invalidateAllCaches();
- }
+ myMemoryCacheService.invalidateAllCaches();
+ }
-	private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class<?> theEntityType) {
- int outcome = 0;
- while (true) {
- StopWatch sw = new StopWatch();
+	private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class<?> theEntityType, RequestPartitionId theRequestPartitionId) {
+ int outcome = 0;
+ while (true) {
+ StopWatch sw = new StopWatch();
- int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
- CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
-			CriteriaQuery<?> cq = cb.createQuery(theEntityType);
- cq.from(theEntityType);
-			TypedQuery<?> query = myEntityManager.createQuery(cq);
- query.setMaxResults(1000);
-			List<?> results = query.getResultList();
- for (Object result : results) {
- myEntityManager.remove(result);
- }
- return results.size();
- });
+ int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).withRequestPartitionId(theRequestPartitionId).execute(() -> {
+ CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
+			CriteriaQuery<?> cq = cb.createQuery(theEntityType);
+ cq.from(theEntityType);
+			TypedQuery<?> query = myEntityManager.createQuery(cq);
+ query.setMaxResults(1000);
+			List<?> results = query.getResultList();
+ for (Object result : results) {
+ myEntityManager.remove(result);
+ }
+ return results.size();
+ });
- outcome += count;
- if (count == 0) {
- break;
- }
+ outcome += count;
+ if (count == 0) {
+ break;
+ }
- ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
- }
- return outcome;
- }
+ ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw);
+ }
+ return outcome;
+ }
@Override
	public int expungeEverythingByType(Class<?> theEntityType) {
- int result = expungeEverythingByTypeWithoutPurging(null, theEntityType);
- purgeAllCaches();
- return result;
+ int result = expungeEverythingByTypeWithoutPurging(null, theEntityType, RequestPartitionId.allPartitions());
+ purgeAllCaches();
+ return result;
}
@Override
@@ -235,7 +244,7 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
private int doExpungeEverythingQuery(String theQuery) {
StopWatch sw = new StopWatch();
int outcome = myEntityManager.createQuery(theQuery).executeUpdate();
- ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw.toString(), theQuery);
+ ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw, theQuery);
return outcome;
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
index 07d44f949eb..050860b47da 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkExportJobEntity.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
*/
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
+import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.r5.model.InstantType;
@@ -46,6 +47,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
+import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.left;
@@ -58,7 +60,7 @@ import static org.apache.commons.lang3.StringUtils.left;
* See the BulkExportAppCtx for job details
*/
@Entity
-@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
+@Table(name = BulkExportJobEntity.HFJ_BLK_EXPORT_JOB, uniqueConstraints = {
@UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
}, indexes = {
@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
@@ -68,13 +70,15 @@ public class BulkExportJobEntity implements Serializable {
public static final int REQUEST_LENGTH = 1024;
public static final int STATUS_MESSAGE_LEN = 500;
+ public static final String JOB_ID = "JOB_ID";
+ public static final String HFJ_BLK_EXPORT_JOB = "HFJ_BLK_EXPORT_JOB";
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID")
@SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID")
@Column(name = "PID")
private Long myId;
- @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false)
+ @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false)
private String myJobId;
@Enumerated(EnumType.STRING)
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java
index 5c4195b5423..27d75dbedc9 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/BulkImportJobEntity.java
@@ -40,21 +40,24 @@ import javax.persistence.Version;
import java.io.Serializable;
import java.util.Date;
+import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;
import static org.apache.commons.lang3.StringUtils.left;
@Entity
-@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = {
+@Table(name = BulkImportJobEntity.HFJ_BLK_IMPORT_JOB, uniqueConstraints = {
@UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")
})
public class BulkImportJobEntity implements Serializable {
+ public static final String HFJ_BLK_IMPORT_JOB = "HFJ_BLK_IMPORT_JOB";
+ public static final String JOB_ID = "JOB_ID";
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID")
@SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID")
@Column(name = "PID")
private Long myId;
- @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false)
+ @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false, updatable = false)
private String myJobId;
@Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
private String myJobDescription;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
index f49e3d10b11..b76e632a75d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/ResourceReindexJobEntity.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.api.Constants;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hl7.fhir.r4.model.InstantType;
import javax.persistence.Column;
import javax.persistence.Entity;
@@ -149,8 +150,8 @@ public class ResourceReindexJobEntity implements Serializable {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("id", myId)
.append("resourceType", myResourceType)
- .append("thresholdLow", myThresholdLow)
- .append("thresholdHigh", myThresholdHigh);
+ .append("thresholdLow", new InstantType(myThresholdLow))
+ .append("thresholdHigh", new InstantType(myThresholdHigh));
if (myDeleted) {
b.append("deleted", myDeleted);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java
index 6170cdc8ffc..d656df3778d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/Search.java
@@ -76,13 +76,18 @@ import static org.apache.commons.lang3.StringUtils.left;
})
public class Search implements ICachedSearchDetails, Serializable {
+ /**
+ * Long enough to accommodate a full UUID (36) with an additional prefix
+ * used by megascale (12)
+ */
@SuppressWarnings("WeakerAccess")
- public static final int UUID_COLUMN_LENGTH = 36;
+ public static final int SEARCH_UUID_COLUMN_LENGTH = 48;
public static final String HFJ_SEARCH = "HFJ_SEARCH";
private static final int MAX_SEARCH_QUERY_STRING = 10000;
private static final int FAILURE_MESSAGE_LENGTH = 500;
private static final long serialVersionUID = 1L;
private static final Logger ourLog = LoggerFactory.getLogger(Search.class);
+ public static final String SEARCH_UUID = "SEARCH_UUID";
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "CREATED", nullable = false, updatable = false)
private Date myCreated;
@@ -136,7 +141,7 @@ public class Search implements ICachedSearchDetails, Serializable {
private SearchStatusEnum myStatus;
@Column(name = "TOTAL_COUNT", nullable = true)
private Integer myTotalCount;
- @Column(name = "SEARCH_UUID", length = UUID_COLUMN_LENGTH, nullable = false, updatable = false)
+ @Column(name = SEARCH_UUID, length = SEARCH_UUID_COLUMN_LENGTH, nullable = false, updatable = false)
private String myUuid;
@SuppressWarnings("unused")
@Version
@@ -146,6 +151,9 @@ public class Search implements ICachedSearchDetails, Serializable {
@Column(name = "SEARCH_PARAM_MAP", nullable = true)
private byte[] mySearchParameterMap;
+ @Transient
+ private transient SearchParameterMap mySearchParameterMapTransient;
+
/**
* This isn't currently persisted in the DB as it's only used for offset mode. We could
* change this if needed in the future.
@@ -363,10 +371,12 @@ public class Search implements ICachedSearchDetails, Serializable {
myTotalCount = theTotalCount;
}
+ @Override
public String getUuid() {
return myUuid;
}
+ @Override
public void setUuid(String theUuid) {
myUuid = theUuid;
}
@@ -402,11 +412,26 @@ public class Search implements ICachedSearchDetails, Serializable {
return myVersion;
}
+ /**
+ * Note that this is not always set! We set this if we're storing a
+ * Search in {@link SearchStatusEnum#PASSCMPLET} status since we'll need
+ * the map in order to restart, but otherwise we save space and time by
+ * not storing it.
+ */
	public Optional<SearchParameterMap> getSearchParameterMap() {
- return Optional.ofNullable(mySearchParameterMap).map(t -> SerializationUtils.deserialize(mySearchParameterMap));
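+		// Cache the deserialized map in the transient field so repeated calls avoid re-deserializing the stored bytes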
+ if (mySearchParameterMapTransient != null) {
+ return Optional.of(mySearchParameterMapTransient);
+ }
+ SearchParameterMap searchParameterMap = null;
+ if (mySearchParameterMap != null) {
+ searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap);
+ mySearchParameterMapTransient = searchParameterMap;
+ }
+ return Optional.ofNullable(searchParameterMap);
}
public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) {
+ mySearchParameterMapTransient = theSearchParameterMap;
mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java
new file mode 100644
index 00000000000..6c2abc8601c
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddress.java
@@ -0,0 +1,52 @@
+package ca.uhn.fhir.jpa.esr;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public class ExternallyStoredResourceAddress {
+
+ private final String myProviderId;
+ private final String myLocation;
+
+ /**
+ * Constructor
+ *
+ * @param theProviderId The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}.
+ * @param theLocation The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it.
+ */
+ public ExternallyStoredResourceAddress(String theProviderId, String theLocation) {
+ myProviderId = theProviderId;
+ myLocation = theLocation;
+ }
+
+ /**
+ * @return The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}.
+ */
+ public String getProviderId() {
+ return myProviderId;
+ }
+
+ /**
+ * @return The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it.
+ */
+ public String getLocation() {
+ return myLocation;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java
new file mode 100644
index 00000000000..dfe1e1a61b1
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceAddressMetadataKey.java
@@ -0,0 +1,39 @@
+package ca.uhn.fhir.jpa.esr;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
+
+public class ExternallyStoredResourceAddressMetadataKey extends ResourceMetadataKeyEnum<ExternallyStoredResourceAddress> {
+
+ /**
+ * Singleton instance
+ */
+ public static final ExternallyStoredResourceAddressMetadataKey INSTANCE = new ExternallyStoredResourceAddressMetadataKey();
+
+ /**
+ * Constructor
+ */
+ private ExternallyStoredResourceAddressMetadataKey() {
+ super("ExternallyStoredResourceAddress", ExternallyStoredResourceAddress.class);
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java
new file mode 100644
index 00000000000..1a89700110f
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/ExternallyStoredResourceServiceRegistry.java
@@ -0,0 +1,69 @@
+package ca.uhn.fhir.jpa.esr;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.apache.commons.lang3.Validate;
+
+import javax.annotation.Nonnull;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.commons.lang3.StringUtils.defaultString;
+
+public class ExternallyStoredResourceServiceRegistry {
+
+ private static final String VALID_ID_PATTERN = "[a-zA-Z0-9_.-]+";
+	private final Map<String, IExternallyStoredResourceService> myIdToProvider = new HashMap<>();
+
+ /**
+ * Registers a new provider. Do not call this method after the server has been started.
+ *
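+	 * Providers are typically registered once from application configuration at startup, e.g. a hypothetical
+	 * <code>registry.registerProvider(new MyS3ResourceService())</code> whose {@link IExternallyStoredResourceService#getId()}
+	 * supplies the provider ID later used in ESR addresses.
+	 *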
+ * @param theProvider The provider to register.
+ */
+ public void registerProvider(@Nonnull IExternallyStoredResourceService theProvider) {
+ String id = defaultString(theProvider.getId());
+ Validate.isTrue(id.matches(VALID_ID_PATTERN), "Invalid provider ID (must match pattern " + VALID_ID_PATTERN + "): %s", id);
+ Validate.isTrue(!myIdToProvider.containsKey(id), "Already have a provider with ID: %s", id);
+
+ myIdToProvider.put(id, theProvider);
+ }
+
+ /**
+ * Do we have any providers registered?
+ */
+ public boolean hasProviders() {
+ return !myIdToProvider.isEmpty();
+ }
+
+ /**
+ * Clears all registered providers. This method is mostly intended for unit tests.
+ */
+ public void clearProviders() {
+ myIdToProvider.clear();
+ }
+
+ @Nonnull
+ public IExternallyStoredResourceService getProvider(@Nonnull String theProviderId) {
+ IExternallyStoredResourceService retVal = myIdToProvider.get(theProviderId);
+ Validate.notNull(retVal, "Invalid ESR provider ID: %s", theProviderId);
+ return retVal;
+ }
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java
new file mode 100644
index 00000000000..da8b9c43097
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/IExternallyStoredResourceService.java
@@ -0,0 +1,41 @@
+package ca.uhn.fhir.jpa.esr;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+public interface IExternallyStoredResourceService {
+
+ /**
+ * Returns the ID of this provider. No two providers may return the same
+ * ID, and this provider should always return the same ID.
+ */
+ String getId();
+
+ /**
+ * Fetches the given resource using the given address string
+ *
+	 * @param theAddress The address string, in a format that is entirely up to the individual provider. HAPI FHIR
+ * doesn't try to understand it.
+ * @return HAPI FHIR may modify the returned object, so it is important to always return a new object for every call here (careful with caching!)
+ */
+ IBaseResource fetchResource(String theAddress);
+}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java
new file mode 100644
index 00000000000..8e0c2b9f59a
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/esr/package-info.java
@@ -0,0 +1,34 @@
+
+/**
+ * This package contains the mechanism for Externally Stored Resources (ESRs).
+ *
+ * A normal resource stores its contents in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable}
+ * entity as text (compressed JSON or otherwise) with one row in that table per version of the
+ * resource. An ESR still creates a row in that table, but stores a reference to actual body
+ * storage instead.
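+ * <p>
+ * The stored reference has the form <code>providerId:address</code>: the provider ID names a registered
+ * {@link IExternallyStoredResourceService}, and the address is an opaque string that only that provider
+ * needs to understand (a hypothetical S3-backed provider might, for example, store <code>s3:bucket/key</code>).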
+ *
+ * THIS IS AN EXPERIMENTAL API - IT MAY GO AWAY OR CHANGE IN THE FUTURE
+ *
+ * @since 6.6.0
+ */
+package ca.uhn.fhir.jpa.esr;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
index bf682ab110f..be042d64c3c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/migrate/tasks/HapiFhirJpaMigrationTasks.java
@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.migrate.tasks;
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
+import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;
@@ -31,13 +33,13 @@ import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
-import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.util.VersionEnum;
import java.util.Arrays;
@@ -48,6 +50,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
+import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;
+
@SuppressWarnings({"SqlNoDataSourceInspection", "SpellCheckingInspection"})
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
@@ -86,15 +90,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
init600(); // 20211102 -
init610();
init620();
- init630();
init640();
init660();
}
protected void init660() {
-
-
Builder version = forVersion(VersionEnum.V6_6_0);
+
// fix Postgres clob types - that stupid oid driver problem is still there
// BT2_JOB_INSTANCE.PARAMS_JSON_LOB
version.onTable("BT2_JOB_INSTANCE")
@@ -105,12 +107,25 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
// BT2_WORK_CHUNK.CHUNK_DATA
version.onTable("BT2_WORK_CHUNK")
.migratePostgresTextClobToBinaryClob("20230208.3", "CHUNK_DATA");
+
+ version
+ .onTable(Search.HFJ_SEARCH)
+ .addColumn("20230215.1", Search.SEARCH_UUID)
+ .nullable()
+ .type(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);
+ version
+ .onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB)
+ .addColumn("20230215.2", BulkImportJobEntity.JOB_ID)
+ .nullable()
+ .type(ColumnTypeEnum.STRING, UUID_LENGTH);
+ version
+ .onTable(BulkExportJobEntity.HFJ_BLK_EXPORT_JOB)
+ .addColumn("20230215.3", BulkExportJobEntity.JOB_ID)
+ .nullable()
+ .type(ColumnTypeEnum.STRING, UUID_LENGTH);
}
+
protected void init640() {
-
- }
-
- protected void init630() {
Builder version = forVersion(VersionEnum.V6_3_0);
// start forced_id inline migration
@@ -129,7 +144,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
.online(true)
.withColumns("SEARCH_PID")
.onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
-;
+ ;
}
@@ -474,10 +489,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
// Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328
version.onTable("NPM_PACKAGE_VER")
- .modifyColumn("20220501.1","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
+ .modifyColumn("20220501.1", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
version.onTable("NPM_PACKAGE_VER_RES")
- .modifyColumn("20220501.2","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
+ .modifyColumn("20220501.2", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
// Fix for https://gitlab.com/simpatico.ai/cdr/-/issues/3166
version.onTable("MPI_LINK")
@@ -822,7 +837,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
// Bulk Import Job
Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID");
blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
- blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH);
+ blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, UUID_LENGTH);
blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10);
blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500);
@@ -1500,7 +1515,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks {
version.onTable("HFJ_SEARCH")
.addColumn("20190814.6", "SEARCH_PARAM_MAP").nullable().type(ColumnTypeEnum.BLOB);
version.onTable("HFJ_SEARCH")
- .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, 36);
+ .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);
version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java
index 3bcb29103a8..fb000b232a0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaConformanceProviderDstu2.java
@@ -69,7 +69,7 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
* Constructor
*/
@CoverageIgnore
- public JpaConformanceProviderDstu2(){
+ public JpaConformanceProviderDstu2() {
super();
super.setCache(false);
setIncludeResourceCounts(true);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java
index c86aa3b83bd..db9baad7b3d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DatabaseBackedPagingProvider.java
@@ -28,6 +28,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.BasePagingProvider;
import org.springframework.beans.factory.annotation.Autowired;
+import javax.annotation.Nullable;
+
// Note: this class is not annotated with @Service because we want to
// explicitly define it in BaseConfig.java. This is done so that
// implementors can override if they want to.
@@ -51,6 +53,7 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider {
/**
* Constructor
+ *
* @deprecated Use {@link DatabaseBackedPagingProvider} as this constructor has no purpose
*/
@Deprecated
@@ -60,12 +63,19 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider {
@Override
public synchronized IBundleProvider retrieveResultList(RequestDetails theRequestDetails, String theId) {
- myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, "Bundle", null, null);
PersistedJpaBundleProvider provider = myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, theId);
- if (!provider.ensureSearchEntityLoaded()) {
+ return validateAndReturnBundleProvider(provider);
+ }
+
+ /**
+ * Subclasses may override in order to modify the bundle provider being returned
+ */
+ @Nullable
+ protected PersistedJpaBundleProvider validateAndReturnBundleProvider(PersistedJpaBundleProvider theBundleProvider) {
+ if (!theBundleProvider.ensureSearchEntityLoaded()) {
return null;
}
- return provider;
+ return theBundleProvider;
}
@Override
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
index dbc45228ccb..54d53fefd84 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProvider.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
@@ -40,9 +41,10 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
-import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.primitive.InstantDt;
@@ -98,7 +100,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
@Autowired
private ISearchCacheSvc mySearchCacheSvc;
@Autowired
- private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
+ private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@Autowired
private JpaStorageSettings myStorageSettings;
@Autowired
@@ -130,6 +132,11 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
myUuid = theSearch.getUuid();
}
+ @VisibleForTesting
+ public void setRequestPartitionHelperSvcForUnitTest(IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
+ myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
+ }
+
protected Search getSearchEntity() {
return mySearchEntity;
}
@@ -148,7 +155,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(),
mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
- RequestPartitionId partitionId = getRequestPartitionIdForHistory();
+ RequestPartitionId partitionId = getRequestPartitionId();
		List<ResourceHistoryTable> results = historyBuilder.fetchEntities(partitionId, theOffset, theFromIndex,
theToIndex, mySearchEntity.getHistorySearchStyle());
@@ -194,18 +201,26 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
}
@Nonnull
- private RequestPartitionId getRequestPartitionIdForHistory() {
+ protected final RequestPartitionId getRequestPartitionId() {
if (myRequestPartitionId == null) {
- if (mySearchEntity.getResourceId() != null) {
- // If we have an ID, we've already checked the partition and made sure it's appropriate
- myRequestPartitionId = RequestPartitionId.allPartitions();
+ ReadPartitionIdRequestDetails details;
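+			// Before the search entity has been loaded we only know the search UUID; afterwards we can describe the read as a history or search request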
+ if (mySearchEntity == null) {
+ details = ReadPartitionIdRequestDetails.forSearchUuid(myUuid);
+ } else if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) {
+ details = ReadPartitionIdRequestDetails.forHistory(mySearchEntity.getResourceType(), null);
} else {
- myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, mySearchEntity.getResourceType(), null);
+ SearchParameterMap params = mySearchEntity.getSearchParameterMap().orElse(null);
+ details = ReadPartitionIdRequestDetails.forSearchType(mySearchEntity.getResourceType(), params, null);
}
+ myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, details);
}
return myRequestPartitionId;
}
+ public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
+ myRequestPartitionId = theRequestPartitionId;
+ }
+
	protected List<IBaseResource> doSearchOrEverything(final int theFromIndex, final int theToIndex) {
if (mySearchEntity.getTotalCount() != null && mySearchEntity.getNumFound() <= 0) {
// No resources to fetch (e.g. we did a _summary=count search)
@@ -217,8 +232,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType);
-		final List<JpaPid> pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest);
- return myTxService.withRequest(myRequest).execute(() -> toResourceList(sb, pidsSubList));
+ RequestPartitionId requestPartitionId = getRequestPartitionId();
+		final List<JpaPid> pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId);
+ return myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(requestPartitionId)
+ .execute(() -> {
+ return toResourceList(sb, pidsSubList);
+ });
}
/**
@@ -226,7 +247,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
*/
public boolean ensureSearchEntityLoaded() {
if (mySearchEntity == null) {
-			Optional<Search> searchOpt = myTxService.withRequest(myRequest).execute(() -> mySearchCacheSvc.fetchByUuid(myUuid));
+			Optional<Search> searchOpt = myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(myRequestPartitionId)
+ .execute(() -> mySearchCacheSvc.fetchByUuid(myUuid));
if (!searchOpt.isPresent()) {
return false;
}
@@ -263,11 +287,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
key = MemoryCacheService.HistoryCountKey.forSystem();
}
-		Function<MemoryCacheService.HistoryCountKey, Integer> supplier = k -> myTxService.withRequest(myRequest).execute(() -> {
- HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
- Long count = historyBuilder.fetchCount(getRequestPartitionIdForHistory());
- return count.intValue();
- });
+ Function supplier = k -> myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(getRequestPartitionId())
+ .execute(() -> {
+ HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
+ Long count = historyBuilder.fetchCount(getRequestPartitionId());
+ return count.intValue();
+ });
boolean haveOffset = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null;
@@ -307,7 +334,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
switch (mySearchEntity.getSearchType()) {
case HISTORY:
- return myTxService.withRequest(myRequest).execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex));
+ return myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(getRequestPartitionId())
+ .execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex));
case SEARCH:
case EVERYTHING:
default:
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
index 855f7dec8c2..c14aa477a74 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaBundleProviderFactory.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
@@ -52,16 +53,16 @@ public class PersistedJpaBundleProviderFactory {
return (PersistedJpaBundleProvider) retVal;
}
- public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) {
- return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder);
+ public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) {
+ return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder, theRequestPartitionId);
}
- public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
- return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
+ public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, RequestPartitionId theRequestPartitionId) {
+ return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null, theRequestPartitionId);
}
- public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
+ public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType, RequestPartitionId theRequestPartitionId) {
String resourceName = defaultIfBlank(theResourceType, null);
Search search = new Search();
@@ -76,7 +77,10 @@ public class PersistedJpaBundleProviderFactory {
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);
- return newInstance(theRequest, search);
+ PersistedJpaBundleProvider provider = newInstance(theRequest, search);
+ provider.setRequestPartitionId(theRequestPartitionId);
+
+ return provider;
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
index 10b7e7da280..3169170700a 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/PersistedJpaSearchFirstPageBundleProvider.java
@@ -20,17 +20,17 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
-import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
-import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,11 +48,15 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
/**
* Constructor
*/
- public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest) {
+ public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
super(theRequest, theSearch.getUuid());
+
+ assert theSearch.getSearchType() != SearchTypeEnum.HISTORY;
+
setSearchEntity(theSearch);
mySearchTask = theSearchTask;
mySearchBuilder = theSearchBuilder;
+ super.setRequestPartitionId(theRequestPartitionId);
}
@Nonnull
@@ -67,9 +71,12 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
final List pids = mySearchTask.getResourcePids(theFromIndex, theToIndex);
ourLog.trace("Done fetching search resource PIDs");
- List retVal = myTxService.withRequest(myRequest).execute(() -> {
- return toResourceList(mySearchBuilder, pids);
- });
+ RequestPartitionId requestPartitionId = getRequestPartitionId();
+
+ List retVal = myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(requestPartitionId)
+ .execute(() -> toResourceList(mySearchBuilder, pids));
long totalCountWanted = theToIndex - theFromIndex;
long totalCountMatch = (int) retVal
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
index 16916368829..0830789e38c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImpl.java
@@ -69,7 +69,7 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
-import org.springframework.data.domain.AbstractPageRequest;
+import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;
@@ -108,7 +108,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
private final SearchBuilderFactory mySearchBuilderFactory;
private final ISynchronousSearchSvc mySynchronousSearchSvc;
private final PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
- private final IRequestPartitionHelperSvc myRequestPartitionHelperService;
private final ISearchParamRegistry mySearchParamRegistry;
private final SearchStrategyFactory mySearchStrategyFactory;
private final ExceptionService myExceptionSvc;
@@ -154,7 +153,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
mySearchBuilderFactory = theSearchBuilderFactory;
mySynchronousSearchSvc = theSynchronousSearchSvc;
myPersistedJpaBundleProviderFactory = thePersistedJpaBundleProviderFactory;
- myRequestPartitionHelperService = theRequestPartitionHelperService;
mySearchParamRegistry = theSearchParamRegistry;
mySearchStrategyFactory = theSearchStrategyFactory;
myExceptionSvc = theExceptionSvc;
@@ -201,9 +199,19 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
/**
* This method is called by the HTTP client processing thread in order to
* fetch resources.
+ *
+ * This method must not be called from inside a transaction. The rationale is that
+ * the {@link Search} entity is treated as a piece of shared state across client threads
+ * accessing the same search, so we need to be able to update that table in a transaction
+ * and commit it right away in order for that to work. Examples of needing to do this
+ * include two different clients requesting the same search and paging through it at the
+ * same time, but also clients that hack the paging links in order to
+ * fetch multiple pages of a search result in parallel. In both cases we need to only
+ * let one of them actually activate the search, or we will have conflicts. The other thread
+ * just needs to wait until the first one actually fetches more results.
*/
@Override
- public List getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails) {
+ public List getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
assert !TransactionSynchronizationManager.isActualTransactionActive();
// If we're actively searching right now, don't try to do anything until at least one batch has been
@@ -240,13 +248,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
Callable searchCallback = () -> mySearchCacheSvc
.fetchByUuid(theUuid)
.orElseThrow(() -> myExceptionSvc.newUnknownSearchException(theUuid));
- if (theRequestDetails != null) {
- search = myTxService.withRequest(theRequestDetails).execute(searchCallback);
- } else {
- search = HapiTransactionService.invokeCallableAndHandleAnyException(searchCallback);
- }
-
+ search = myTxService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .execute(searchCallback);
QueryParameterUtils.verifySearchHasntFailedOrThrowInternalErrorException(search);
+
if (search.getStatus() == SearchStatusEnum.FINISHED) {
ourLog.trace("Search entity marked as finished with {} results", search.getNumFound());
break;
@@ -272,7 +279,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
String resourceType = search.getResourceType();
SearchParameterMap params = search.getSearchParameterMap().orElseThrow(() -> new IllegalStateException("No map in PASSCOMPLET search"));
IFhirResourceDao> resourceDao = myDaoRegistry.getResourceDao(resourceType);
- RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null);
SearchTaskParameters parameters = new SearchTaskParameters(
search,
@@ -280,7 +286,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
params,
resourceType,
theRequestDetails,
- requestPartitionId,
+ theRequestPartitionId,
myOnRemoveSearchTask,
mySyncSize
);
@@ -299,7 +305,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
ourLog.trace("Finished looping");
- List pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search);
+ List pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search, theRequestPartitionId);
ourLog.trace("Fetched {} results", pids.size());
@@ -307,8 +313,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
}
@Nonnull
- private List fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch) {
- List pids = myTxService.withRequest(theRequestDetails).execute(() -> mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo));
+ private List fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch, RequestPartitionId theRequestPartitionId) {
+ List pids = mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId);
if (pids == null) {
throw myExceptionSvc.newUnknownSearchException(theUuid);
}
@@ -325,7 +331,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
Search search = new Search();
QueryParameterUtils.populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId);
- myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search);
+ myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search, theRequestPartitionId);
validateSearch(theParams);
@@ -483,7 +489,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
myIdToSearchTask.put(theSearch.getUuid(), task);
task.call();
- PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb);
+ PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb, theRequestPartitionId);
ourLog.debug("Search initial phase completed in {}ms", w.getMillis());
return retVal;
@@ -492,34 +498,37 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
@Nullable
private PersistedJpaBundleProvider findCachedQuery(SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theQueryString, RequestPartitionId theRequestPartitionId) {
// May be null
- return myTxService.withRequest(theRequestDetails).execute(() -> {
+ return myTxService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .execute(() -> {
- // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
- HookParams params = new HookParams()
- .add(SearchParameterMap.class, theParams)
- .add(RequestDetails.class, theRequestDetails)
- .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
- Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
- if (Boolean.FALSE.equals(outcome)) {
- return null;
- }
+ // Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
+ HookParams params = new HookParams()
+ .add(SearchParameterMap.class, theParams)
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
+ Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
+ if (Boolean.FALSE.equals(outcome)) {
+ return null;
+ }
- // Check for a search matching the given hash
- Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId);
- if (searchToUse == null) {
- return null;
- }
+ // Check for a search matching the given hash
+ Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId);
+ if (searchToUse == null) {
+ return null;
+ }
- ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());
- // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
- params = new HookParams()
- .add(SearchParameterMap.class, theParams)
- .add(RequestDetails.class, theRequestDetails)
- .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
- CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);
+ ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());
+ // Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
+ params = new HookParams()
+ .add(SearchParameterMap.class, theParams)
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
+ CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);
- return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid());
- });
+ return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid());
+ });
}
@Nullable
@@ -563,7 +572,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
int pageIndex = theFromIndex / pageSize;
- Pageable page = new AbstractPageRequest(pageIndex, pageSize) {
+ return new PageRequest(pageIndex, pageSize, Sort.unsorted()) {
private static final long serialVersionUID = 1L;
@Override
@@ -571,33 +580,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
return theFromIndex;
}
- @Override
- public Sort getSort() {
- return Sort.unsorted();
- }
-
- @Override
- public Pageable next() {
- return null;
- }
-
- @Override
- public Pageable previous() {
- return null;
- }
-
- @Override
- public Pageable first() {
- return null;
- }
-
- @Override
- public Pageable withPage(int theI) {
- return null;
- }
};
-
- return page;
}
}
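Taken together, the SearchCoordinatorSvcImpl changes move partition resolution out to the caller: the partition is determined once, outside any transaction (per the getResources() contract documented above), and then threaded through every downstream call. A minimal sketch of the resulting calling pattern follows; the local variable names and the "Patient" resource type are illustrative assumptions, but every call shown appears elsewhere in this diff.

// Resolve the partition once, up front (no transaction is open yet).
ReadPartitionIdRequestDetails details =
        ReadPartitionIdRequestDetails.forSearchType("Patient", searchParameterMap, null);
RequestPartitionId partitionId =
        myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);

// Must be invoked outside a transaction (the method asserts this), because the Search
// entity is shared state that may need to be committed while other clients are paging.
List<JpaPid> pids = mySearchCoordinatorSvc.getResources(searchUuid, 0, 50, theRequestDetails, partitionId);

// Only now open a transaction, bound to the same partition, to materialize the resources.
List<IBaseResource> resources = myTxService
        .withRequest(theRequestDetails)
        .withRequestPartitionId(partitionId)
        .execute(() -> toResourceList(searchBuilder, pids));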
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
index e0c77d25840..2b535f2be5e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/SynchronousSearchSvcImpl.java
@@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;
@@ -85,6 +86,9 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
@Autowired
private EntityManager myEntityManager;
+ @Autowired
+ private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
+
private int mySyncSize = 250;
@Override
@@ -97,8 +101,11 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
boolean wantCount = theParamWantOnlyCount || theParamOrConfigWantCount;
// Execute the query and make sure we return distinct results
-
- return myTxService.withRequest(theRequestDetails).readOnly().execute(() -> {
+ return myTxService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .readOnly()
+ .execute(() -> {
// Load the results synchronously
final List pids = new ArrayList<>();
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
index 7184310d9ab..93779296013 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java
@@ -878,7 +878,7 @@ public class SearchBuilder implements ISearchBuilder {
resourceId.setVersion(version);
if (version != null && !version.equals(next.getVersion())) {
IFhirResourceDao extends IBaseResource> dao = myDaoRegistry.getResourceDao(resourceType);
- next = dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null);
+ next = (IBaseResourceEntity) dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null);
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java
index 76cfbf3790c..ff021676441 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/StorageInterceptorHooksFacade.java
@@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.Search;
-import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -46,13 +46,15 @@ public class StorageInterceptorHooksFacade {
* @param theRequestDetails
* @param theParams
* @param search
+ * @param theRequestPartitionId
*/
- public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search) {
+ public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search, RequestPartitionId theRequestPartitionId) {
HookParams params = new HookParams()
.add(ICachedSearchDetails.class, search)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
- .add(SearchParameterMap.class, theParams);
+ .add(SearchParameterMap.class, theParams)
+ .add(RequestPartitionId.class, theRequestPartitionId);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params);
}
//private IInterceptorBroadcaster myInterceptorBroadcaster;
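Because the facade now adds the RequestPartitionId to the STORAGE_PRESEARCH_REGISTERED hook parameters, an interceptor can observe which partition a search was registered against (assuming the Pointcut's declared parameter list is updated correspondingly elsewhere in this changeset). A hypothetical interceptor sketch, with an illustrative class name:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;

@Interceptor
public class PresearchPartitionLoggingInterceptor {

    private static final org.slf4j.Logger ourLog =
            org.slf4j.LoggerFactory.getLogger(PresearchPartitionLoggingInterceptor.class);

    @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
    public void presearchRegistered(ICachedSearchDetails theSearch, RequestPartitionId theRequestPartitionId) {
        // theRequestPartitionId is the hook parameter added by this change
        ourLog.info("Search {} registered against partition {}", theSearch, theRequestPartitionId);
    }
}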
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java
index 6d4bec2f6a1..f766c8dffc0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchContinuationTask.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.tasks;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
@@ -74,8 +75,12 @@ public class SearchContinuationTask extends SearchTask {
@Override
public Void call() {
try {
- myTxService.withRequest(myRequestDetails).execute(() -> {
- List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch());
+ RequestPartitionId requestPartitionId = getRequestPartitionId();
+ myTxService
+ .withRequest(myRequestDetails)
+ .withRequestPartitionId(requestPartitionId)
+ .execute(() -> {
+ List previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch(), myRequestDetails, requestPartitionId);
if (previouslyAddedResourcePids == null) {
throw myExceptionSvc.newUnknownSearchException(getSearch().getUuid());
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java
index 759457b8d3f..f401a5bfcff 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/tasks/SearchTask.java
@@ -92,7 +92,10 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
public class SearchTask implements Callable {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchTask.class);
-
+ // injected beans
+ protected final HapiTransactionService myTxService;
+ protected final FhirContext myContext;
+ protected final ISearchResultCacheSvc mySearchResultCacheSvc;
private final SearchParameterMap myParams;
private final IDao myCallingDao;
private final String myResourceType;
@@ -104,6 +107,14 @@ public class SearchTask implements Callable {
private final RequestPartitionId myRequestPartitionId;
private final SearchRuntimeDetails mySearchRuntimeDetails;
private final Transaction myParentTransaction;
+ private final Consumer myOnRemove;
+ private final int mySyncSize;
+ private final Integer myLoadingThrottleForUnitTests;
+ private final IInterceptorBroadcaster myInterceptorBroadcaster;
+ private final SearchBuilderFactory mySearchBuilderFactory;
+ private final JpaStorageSettings myStorageSettings;
+ private final ISearchCacheSvc mySearchCacheSvc;
+ private final IPagingProvider myPagingProvider;
private Search mySearch;
private boolean myAbortRequested;
private int myCountSavedTotal = 0;
@@ -112,22 +123,6 @@ public class SearchTask implements Callable {
private boolean myAdditionalPrefetchThresholdsRemaining;
private List myPreviouslyAddedResourcePids;
private Integer myMaxResultsToFetch;
-
- private final Consumer myOnRemove;
-
- private final int mySyncSize;
- private final Integer myLoadingThrottleForUnitTests;
-
- // injected beans
- protected final HapiTransactionService myTxService;
- protected final FhirContext myContext;
- private final IInterceptorBroadcaster myInterceptorBroadcaster;
- private final SearchBuilderFactory mySearchBuilderFactory;
- protected final ISearchResultCacheSvc mySearchResultCacheSvc;
- private final JpaStorageSettings myStorageSettings;
- private final ISearchCacheSvc mySearchCacheSvc;
- private final IPagingProvider myPagingProvider;
-
/**
* Constructor
*/
@@ -169,6 +164,10 @@ public class SearchTask implements Callable {
myParentTransaction = ElasticApm.currentTransaction();
}
+ protected RequestPartitionId getRequestPartitionId() {
+ return myRequestPartitionId;
+ }
+
/**
* This method is called by the server HTTP thread, and
* will block until at least one page of results have been
@@ -267,11 +266,18 @@ public class SearchTask implements Callable {
}
public void saveSearch() {
- myTxService.execute(myRequest, null, Propagation.REQUIRES_NEW, Isolation.DEFAULT, ()->doSaveSearch());
+ myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(myRequestPartitionId)
+ .withPropagation(Propagation.REQUIRES_NEW)
+ .execute(() -> doSaveSearch());
}
private void saveUnsynced(final IResultIterator theResultIter) {
- myTxService.withRequest(myRequest).execute(()->{
+ myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(myRequestPartitionId)
+ .execute(() -> {
if (mySearch.getId() == null) {
doSaveSearch();
}
@@ -303,7 +309,7 @@ public class SearchTask implements Callable {
// Actually store the results in the query cache storage
myCountSavedTotal += unsyncedPids.size();
myCountSavedThisPass += unsyncedPids.size();
- mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids);
+ mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids, myRequest, getRequestPartitionId());
synchronized (mySyncedPids) {
int numSyncedThisPass = unsyncedPids.size();
@@ -387,7 +393,11 @@ public class SearchTask implements Callable {
// Create an initial search in the DB and give it an ID
saveSearch();
- myTxService.execute(myRequest, null, Propagation.REQUIRED, Isolation.READ_COMMITTED, ()->doSearch());
+ myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(myRequestPartitionId)
+ .withIsolation(Isolation.READ_COMMITTED)
+ .execute(() -> doSearch());
mySearchRuntimeDetails.setSearchStatus(mySearch.getStatus());
if (mySearch.getStatus() == SearchStatusEnum.FINISHED) {
@@ -506,13 +516,16 @@ public class SearchTask implements Callable {
ourLog.trace("Got count {}", count);
- myTxService.withRequest(myRequest).execute(()->{
- mySearch.setTotalCount(count.intValue());
- if (myParamWantOnlyCount) {
- mySearch.setStatus(SearchStatusEnum.FINISHED);
- }
- doSaveSearch();
- });
+ myTxService
+ .withRequest(myRequest)
+ .withRequestPartitionId(myRequestPartitionId)
+ .execute(() -> {
+ mySearch.setTotalCount(count.intValue());
+ if (myParamWantOnlyCount) {
+ mySearch.setStatus(SearchStatusEnum.FINISHED);
+ }
+ doSaveSearch();
+ });
if (myParamWantOnlyCount) {
return;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
index 6e1d5128a1f..52e9bce687d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchCacheSvcImpl.java
@@ -111,7 +111,6 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
}
@Override
- @Transactional(propagation = Propagation.NEVER)
public Optional tryToMarkSearchAsInProgress(Search theSearch) {
ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING);
try {
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java
index 907715501c2..027745081b0 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/DatabaseSearchResultCacheSvcImpl.java
@@ -20,17 +20,18 @@ package ca.uhn.fhir.jpa.search.cache;
* #L%
*/
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
import java.util.Collections;
import java.util.List;
@@ -43,50 +44,65 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
@Autowired
private ISearchResultDao mySearchResultDao;
+ @Autowired
+ private IHapiTransactionService myTransactionService;
+
@Override
- @Transactional(propagation = Propagation.REQUIRED)
- public List fetchResultPids(Search theSearch, int theFrom, int theTo) {
- final Pageable page = toPage(theFrom, theTo);
- if (page == null) {
- return Collections.emptyList();
- }
+ public List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
+ return myTransactionService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .execute(() -> {
+ final Pageable page = toPage(theFrom, theTo);
+ if (page == null) {
+ return Collections.emptyList();
+ }
- List retVal = mySearchResultDao
- .findWithSearchPid(theSearch.getId(), page)
- .getContent();
+ List retVal = mySearchResultDao
+ .findWithSearchPid(theSearch.getId(), page)
+ .getContent();
- ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());
+ ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());
- return JpaPid.fromLongList(retVal);
+ return JpaPid.fromLongList(retVal);
+ });
}
@Override
- @Transactional(propagation = Propagation.REQUIRED)
- public List fetchAllResultPids(Search theSearch) {
- List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
- ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
- return JpaPid.fromLongList(retVal);
+ public List fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
+ return myTransactionService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .execute(() -> {
+ List retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
+ ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
+ return JpaPid.fromLongList(retVal);
+ });
}
@Override
- @Transactional(propagation = Propagation.REQUIRED)
- public void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids) {
- List resultsToSave = Lists.newArrayList();
+ public void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
+ myTransactionService
+ .withRequest(theRequestDetails)
+ .withRequestPartitionId(theRequestPartitionId)
+ .execute(() -> {
+ List resultsToSave = Lists.newArrayList();
- ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size());
+ ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size());
- int order = thePreviouslyStoredResourcePids.size();
- for (JpaPid nextPid : theNewResourcePids) {
- SearchResult nextResult = new SearchResult(theSearch);
- nextResult.setResourcePid(nextPid.getId());
- nextResult.setOrder(order);
- resultsToSave.add(nextResult);
- ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());
+ int order = thePreviouslyStoredResourcePids.size();
+ for (JpaPid nextPid : theNewResourcePids) {
+ SearchResult nextResult = new SearchResult(theSearch);
+ nextResult.setResourcePid(nextPid.getId());
+ nextResult.setOrder(order);
+ resultsToSave.add(nextResult);
+ ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());
- order++;
- }
+ order++;
+ }
- mySearchResultDao.saveAll(resultsToSave);
+ mySearchResultDao.saveAll(resultsToSave);
+ });
}
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java
index 4b4e12aba7b..9880e22ee7e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/cache/ISearchResultCacheSvc.java
@@ -20,8 +20,10 @@ package ca.uhn.fhir.jpa.search.cache;
* #L%
*/
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
import javax.annotation.Nullable;
import java.util.List;
@@ -30,12 +32,12 @@ public interface ISearchResultCacheSvc {
/**
* @param theSearch The search - This method is not required to persist any changes to the Search object, it is only provided here for identification
* @param thePreviouslyStoredResourcePids A list of resource PIDs that have previously been saved to this search
- * @param theNewResourcePids A list of new resoure PIDs to add to this search (these ones have not been previously saved)
+ * @param theNewResourcePids A list of new resource PIDs to add to this search (these ones have not been previously saved)
*/
- void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids);
+ void storeResults(Search theSearch, List thePreviouslyStoredResourcePids, List theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);
/**
- * Fetch a sunset of the search result IDs from the cache
+ * Fetch a subset of the search result IDs from the cache
*
* @param theSearch The search to fetch IDs for
* @param theFrom The starting index (inclusive)
@@ -44,7 +46,7 @@ public interface ISearchResultCacheSvc {
* have been removed from the cache for some reason, such as expiry or manual purge)
*/
@Nullable
- List fetchResultPids(Search theSearch, int theFrom, int theTo);
+ List fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);
/**
* Fetch all result PIDs for a given search with no particular order required
@@ -54,6 +56,6 @@ public interface ISearchResultCacheSvc {
* have been removed from the cache for some reason, such as expiry or manual purge)
*/
@Nullable
- List fetchAllResultPids(Search theSearch);
+ List fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java
new file mode 100644
index 00000000000..826c64b4dca
--- /dev/null
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/JpaHapiTransactionService.java
@@ -0,0 +1,45 @@
+package ca.uhn.fhir.jpa.util;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import org.springframework.orm.jpa.JpaDialect;
+import org.springframework.orm.jpa.JpaTransactionManager;
+import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
+
+public class JpaHapiTransactionService extends HapiTransactionService {
+
+ private volatile Boolean myCustomIsolationSupported;
+
+ @Override
+ public boolean isCustomIsolationSupported() {
+ if (myCustomIsolationSupported == null) {
+ if (myTransactionManager instanceof JpaTransactionManager) {
+ JpaDialect jpaDialect = ((JpaTransactionManager) myTransactionManager).getJpaDialect();
+ myCustomIsolationSupported = (jpaDialect instanceof HibernateJpaDialect);
+ } else {
+ myCustomIsolationSupported = false;
+ }
+ }
+ return myCustomIsolationSupported;
+ }
+
+}
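A possible wiring sketch for the new subclass (the actual bean definition presumably lives in JpaConfig, which is not part of this excerpt). Registering the JPA-aware flavour is what lets isCustomIsolationSupported() be consulted when a caller requests a specific isolation level, such as the READ_COMMITTED request in SearchTask above:

// Hypothetical Spring configuration method; treat the bean name and placement as assumptions.
@Bean
public IHapiTransactionService hapiTransactionService() {
    // Reports custom-isolation support only when the JpaDialect is Hibernate's
    return new JpaHapiTransactionService();
}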
diff --git a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
index eb3c5855045..0f98464e7b1 100644
--- a/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-elastic-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-ips/pom.xml b/hapi-fhir-jpaserver-ips/pom.xml
index f448ee6a1fa..6a26fd72124 100644
--- a/hapi-fhir-jpaserver-ips/pom.xml
+++ b/hapi-fhir-jpaserver-ips/pom.xml
@@ -3,7 +3,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml
index aa8b77635bd..d737529f485 100644
--- a/hapi-fhir-jpaserver-mdm/pom.xml
+++ b/hapi-fhir-jpaserver-mdm/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java
index 5440057bd78..98c2059ad4e 100644
--- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java
+++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImpl.java
@@ -44,7 +44,6 @@ import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.provider.MdmControllerHelper;
import ca.uhn.fhir.mdm.provider.MdmControllerUtil;
import ca.uhn.fhir.model.primitive.IdDt;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -125,9 +124,8 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
}
@Override
- public Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails)
- {
- RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null);
+ public Page queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails) {
+ RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null);
Page resultPage;
if (theReadPartitionId.hasPartitionIds()) {
theMdmQuerySearchParameters.setPartitionIds(theReadPartitionId.getPartitionIds());
@@ -166,12 +164,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
@Override
public Page getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails, String theRequestResourceType) {
Page resultPage;
- RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null);
+ RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null);
- if (readPartitionId.isAllPartitions()){
- resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType);
+ if (readPartitionId.isAllPartitions()) {
+ resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType);
} else {
- resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType);
+ resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType);
}
validateMdmQueryPermissions(readPartitionId, resultPage.getContent(), theRequestDetails);
@@ -205,12 +203,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
public IBaseParameters submitMdmClearJob(@Nonnull List theResourceNames, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) {
MdmClearJobParameters params = new MdmClearJobParameters();
params.setResourceNames(theResourceNames);
- if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) {
+ if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) {
params.setBatchSize(theBatchSize.getValue().intValue());
}
- ReadPartitionIdRequestDetails details= new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
- RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details);
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_MDM_CLEAR);
+ RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
params.setRequestPartitionId(requestPartition);
JobInstanceStartRequest request = new JobInstanceStartRequest();
@@ -226,10 +224,10 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
@Override
- public IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) {
+ public IBaseParameters submitMdmSubmitJob(List theUrls, IPrimitiveType theBatchSize, ServletRequestDetails theRequestDetails) {
MdmSubmitJobParameters params = new MdmSubmitJobParameters();
- if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) {
+ if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) {
params.setBatchSize(theBatchSize.getValue().intValue());
}
params.setRequestPartitionId(RequestPartitionId.allPartitions());
@@ -256,12 +254,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
myIMdmLinkUpdaterSvc.notDuplicateGoldenResource(goldenResource, target, theMdmTransactionContext);
}
- private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List theMdmLinkJsonList, RequestDetails theRequestDetails){
+ private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List theMdmLinkJsonList, RequestDetails theRequestDetails) {
Set seenResourceTypes = new HashSet<>();
- for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList){
+ for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList) {
IdDt idDt = new IdDt(mdmLinkJson.getSourceId());
- if (!seenResourceTypes.contains(idDt.getResourceType())){
+ if (!seenResourceTypes.contains(idDt.getResourceType())) {
myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, idDt.getResourceType(), theRequestPartitionId);
seenResourceTypes.add(idDt.getResourceType());
}
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java
index b3537378e63..b0db3b2674c 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/provider/MdmProviderMatchR4Test.java
@@ -36,7 +36,7 @@ public class MdmProviderMatchR4Test extends BaseProviderR4Test {
}
@Test
- public void testMatch() throws Exception {
+ public void testMatch() {
Patient jane = buildJanePatient();
jane.setActive(true);
Patient createdJane = createPatient(jane);
diff --git a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java
index bf6b348b92a..25f39dde818 100644
--- a/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java
+++ b/hapi-fhir-jpaserver-mdm/src/test/java/ca/uhn/fhir/jpa/mdm/svc/MdmControllerSvcImplTest.java
@@ -32,6 +32,8 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.data.domain.Page;
@@ -41,6 +43,7 @@ import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.DEFAULT_PAGE_SIZE;
import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.MAX_PAGE_SIZE;
@@ -50,6 +53,8 @@ import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
public class MdmControllerSvcImplTest extends BaseLinkR4Test {
+ private static final Logger ourLog = LoggerFactory.getLogger(MdmControllerSvcImplTest.class);
+
@Autowired
IMdmControllerSvc myMdmControllerSvc;
@@ -137,13 +142,18 @@ public class MdmControllerSvcImplTest extends BaseLinkR4Test {
assertEquals(MdmLinkSourceEnum.AUTO, link.getLinkSource());
assertLinkCount(2);
+ runInTransaction(()->{
+ ourLog.info("Links: {}", myMdmLinkDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
+ });
+
+ myCaptureQueriesListener.clear();
Page resultPage = myMdmControllerSvc.getDuplicateGoldenResources(null,
new MdmPageRequest((Integer) null, null, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE),
new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.fromPartitionId(1)), null);
+ myCaptureQueriesListener.logSelectQueries();
- assertEquals(resultPage.getContent().size(), 1);
-
- assertEquals(resultPage.getContent().get(0).getSourceId(), patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart());
+ assertEquals(1, resultPage.getContent().size());
+ assertEquals(patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart(), resultPage.getContent().get(0).getSourceId());
Mockito.verify(myRequestPartitionHelperSvc, Mockito.atLeastOnce()).validateHasPartitionPermissions(any(), eq("Patient"), argThat(new PartitionIdMatcher(requestPartitionId)));
}
diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml
index 89d92050002..27143b9e4b8 100644
--- a/hapi-fhir-jpaserver-model/pom.xml
+++ b/hapi-fhir-jpaserver-model/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
index ab1b9fba0fc..02e94f816be 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/config/PartitionSettings.java
@@ -30,6 +30,25 @@ public class PartitionSettings {
private boolean myIncludePartitionInSearchHashes = false;
private boolean myUnnamedPartitionMode;
private Integer myDefaultPartitionId;
+ private boolean myAlwaysOpenNewTransactionForDifferentPartition;
+
+ /**
+ * Should we always open a new database transaction if the partition context changes
+ *
+ * @since 6.6.0
+ */
+ public boolean isAlwaysOpenNewTransactionForDifferentPartition() {
+ return myAlwaysOpenNewTransactionForDifferentPartition;
+ }
+
+ /**
+ * Should we always open a new database transaction if the partition context changes
+ *
+ * @since 6.6.0
+ */
+ public void setAlwaysOpenNewTransactionForDifferentPartition(boolean theAlwaysOpenNewTransactionForDifferentPartition) {
+ myAlwaysOpenNewTransactionForDifferentPartition = theAlwaysOpenNewTransactionForDifferentPartition;
+ }
/**
* If set to true (default is false) the PARTITION_ID value will be factored into the
@@ -136,6 +155,12 @@ public class PartitionSettings {
myDefaultPartitionId = theDefaultPartitionId;
}
+ /**
+ * If enabled, the JPA server will allow unqualified cross-partition references
+ */
+ public boolean isAllowUnqualifiedCrossPartitionReference() {
+ return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
+ }
public enum CrossPartitionReferenceMode {
@@ -145,18 +170,11 @@ public class PartitionSettings {
NOT_ALLOWED,
/**
- * References can cross partition boundaries, in a way that hides the existence of partitions to the end user
+ * References can cross partition boundaries, with an assumption that boundaries
+ * will be managed by the database.
*/
- ALLOWED_UNQUALIFIED
+ ALLOWED_UNQUALIFIED,
}
- /**
- * If enabled the JPA server will allow unqualified cross partition reference
- *
- */
- public boolean isAllowUnqualifiedCrossPartitionReference() {
- return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
- }
-
}
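A short configuration sketch for the new flag; only setAlwaysOpenNewTransactionForDifferentPartition() is introduced by this change, the other call is an existing setter on this class:

PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setDefaultPartitionId(0);
// New in 6.6.0: open a fresh database transaction whenever the partition context changes,
// instead of reusing a transaction that was started for a different partition.
partitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(true);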
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
index 09d3fec755e..f57ab77846f 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java
@@ -20,9 +20,10 @@ package ca.uhn.fhir.jpa.model.cross;
* #L%
*/
+import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
-public interface IBasePersistedResource extends IResourceLookup {
+public interface IBasePersistedResource> extends IResourceLookup {
IIdType getIdDt();
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java
index 952cd84c82a..cb3c5a5597f 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IResourceLookup.java
@@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import java.util.Date;
-public interface IResourceLookup {
+public interface IResourceLookup> {
String getResourceType();
/**
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java
index f3465520aa9..b25eabd30cd 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/HapiSequenceStyleGenerator.java
@@ -65,8 +65,12 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste
@Override
public Serializable generate(SharedSessionContractImplementor theSession, Object theObject) throws HibernateException {
- Long next = (Long) myGen.generate(theSession, theObject);
- return myIdMassager.massage(myGeneratorName, next);
+ Long retVal = myIdMassager.generate(myGeneratorName);
+ if (retVal == null) {
+ Long next = (Long) myGen.generate(theSession, theObject);
+ retVal = myIdMassager.massage(myGeneratorName, next);
+ }
+ return retVal;
}
@Override
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java
index b46ca79350a..bf940b2e47f 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/dialect/ISequenceValueMassager.java
@@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.model.dialect;
* #L%
*/
+import javax.annotation.Nullable;
+
/**
* This is an internal API and may change or disappear without notice
*
@@ -29,6 +31,17 @@ public interface ISequenceValueMassager {
Long massage(String theGeneratorName, Long theId);
+ /**
+ * If desired, the massager can supply an ID on its own. This method is tried first,
+ * and if it returns null, the normal hi-lo generator is called and then
+ * {@link #massage(String, Long)} is called on the output of that.
+ *
+ * @return Returns an ID or null
+ */
+ @Nullable
+ default Long generate(String theGeneratorName) {
+ return null;
+ }
final class NoopSequenceValueMassager implements ISequenceValueMassager {
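A hypothetical implementation illustrating the new contract: generate() is consulted first, and only a null return falls through to the hi-lo generator followed by massage():

import java.util.concurrent.atomic.AtomicLong;

// Illustrative massager that supplies IDs itself; because generate() never returns null here,
// massage() is never reached for this generator.
public class CounterSequenceValueMassager implements ISequenceValueMassager {

    private final AtomicLong myNextId = new AtomicLong(1_000_000L);

    @Override
    public Long generate(String theGeneratorName) {
        return myNextId.incrementAndGet();
    }

    @Override
    public Long massage(String theGeneratorName, Long theId) {
        return theId; // only invoked when generate() returned null
    }
}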
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java
index 556b51d7e25..2d0d95ddbe0 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.model.primitive.InstantDt;
import org.hibernate.annotations.OptimisticLock;
@@ -38,8 +39,10 @@ import java.util.Date;
import static org.apache.commons.lang3.StringUtils.defaultString;
@MappedSuperclass
-public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource {
+public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource<JpaPid> {
+ public static final String RES_PUBLISHED = "RES_PUBLISHED";
+ public static final String RES_UPDATED = "RES_UPDATED";
@Column(name = "RES_DELETED_AT", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;
@@ -54,12 +57,12 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
private boolean myHasTags;
@Temporal(TemporalType.TIMESTAMP)
- @Column(name = "RES_PUBLISHED", nullable = false)
+ @Column(name = RES_PUBLISHED, nullable = false)
@OptimisticLock(excluded = true)
private Date myPublished;
@Temporal(TemporalType.TIMESTAMP)
- @Column(name = "RES_UPDATED", nullable = false)
+ @Column(name = RES_UPDATED, nullable = false)
@OptimisticLock(excluded = true)
private Date myUpdated;
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java
index 389d1c18ef3..9d5bef4c2bb 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/PartitionablePartitionId.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.entity;
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -118,4 +119,14 @@ public class PartitionablePartitionId implements Cloneable {
return RequestPartitionId.defaultPartition();
}
}
+
+ @Nonnull
+ public static PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId, @Nonnull PartitionSettings thePartitionSettings) {
+ Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull();
+ if (partitionId == null) {
+ partitionId = thePartitionSettings.getDefaultPartitionId();
+ }
+ return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate());
+ }
+
}
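A short sketch of how the new helper might be used when mapping a request partition onto a storable entity partition; the literal partition ID 123 is illustrative only:

RequestPartitionId requestPartitionId = RequestPartitionId.fromPartitionId(123);
PartitionSettings partitionSettings = new PartitionSettings();

// When the request resolves to the default partition (no explicit partition ID),
// the helper substitutes PartitionSettings.getDefaultPartitionId()
PartitionablePartitionId storagePartition =
		PartitionablePartitionId.toStoragePartition(requestPartitionId, partitionSettings);

This replaces the toStoragePartition() method previously declared on IRequestPartitionHelperSvc (removed later in this diff), keeping the conversion next to the entity type it produces.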
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java
index 6a09380f77a..4dda8e4898e 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceEncodingEnum.java
@@ -23,9 +23,6 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;
-/**
- * @see ResourceHistoryTable#ENCODING_COL_LENGTH
- */
public enum ResourceEncodingEnum {
/*
@@ -47,7 +44,13 @@ public enum ResourceEncodingEnum {
/**
* Resource was deleted - No contents expected
*/
- DEL;
+ DEL,
+
+ /**
+ * Externally stored resource - Resource text is a reference to an external storage location,
+ * which will be stored in {@link ResourceHistoryTable#getResourceTextVc()}
+ */
+ ESR;
public IParser newParser(FhirContext theContext) {
return theContext.newJsonParser();
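To make the intent of ESR concrete, a hypothetical read path could branch on the encoding as follows; readFromExternalStore() is an invented helper and the non-ESR branch is simplified:

private String readStoredText(ResourceHistoryTable theHistory) {
	if (theHistory.getEncoding() == ResourceEncodingEnum.ESR) {
		// For ESR, the text column holds an address into external storage rather than the body
		return readFromExternalStore(theHistory.getResourceTextVc());
	}
	// Other encodings keep the body in the row itself (JSON/JSONC/DEL handling omitted here)
	return theHistory.getResourceTextVc();
}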
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
index 40f902926ec..30a940970d3 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceHistoryTable.java
@@ -92,6 +92,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true)
@OptimisticLock(excluded = true)
private String myResourceTextVc;
+
@Column(name = "RES_ENCODING", nullable = false, length = ENCODING_COL_LENGTH)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java
index 9e26ac1392d..6c4bfa0bf42 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java
@@ -98,6 +98,9 @@ public class ResourceLink extends BaseResourceIndex {
@Transient
private transient String myTargetResourceId;
+ /**
+ * Constructor
+ */
public ResourceLink() {
super();
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
index 4a913de4e85..956bee5f5ea 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java
@@ -81,17 +81,19 @@ import static org.hibernate.envers.RelationTargetAuditMode.NOT_AUDITED;
@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
@Entity
-@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {
+@Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = {
// Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE
- @Index(name = "IDX_RES_DATE", columnList = "RES_UPDATED"),
+ @Index(name = "IDX_RES_DATE", columnList = BaseHasResource.RES_UPDATED),
@Index(name = "IDX_RES_TYPE_DEL_UPDATED", columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"),
})
@NamedEntityGraph(name = "Resource.noJoins")
@Audited(targetAuditMode = NOT_AUDITED)
-public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource, IResourceLookup {
+public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource<JpaPid> {
public static final int RESTYPE_LEN = 40;
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final long serialVersionUID = 1L;
+ public static final String HFJ_RESOURCE = "HFJ_RESOURCE";
+ public static final String RES_TYPE = "RES_TYPE";
/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB
@@ -267,7 +269,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection myResourceLinksAsTarget;
- @Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false)
+ @Column(name = RES_TYPE, length = RESTYPE_LEN, nullable = false)
@FullTextField
@OptimisticLock(excluded = true)
private String myResourceType;
@@ -773,7 +775,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}
@Override
- public IResourcePersistentId getPersistentId() {
+ public JpaPid getPersistentId() {
return JpaPid.fromId(getId());
}
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
index 9591becee61..84ac8db7257 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
+++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/StorageSettings.java
@@ -42,6 +42,10 @@ import java.util.Set;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
+/**
+ * This class contains configuration options common to all hapi-fhir-storage implementations.
+ * Ultimately it should live in that project
+ */
public class StorageSettings {
/**
* @since 5.6.0
diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml
index bc6152c23b7..02300acc9cf 100755
--- a/hapi-fhir-jpaserver-searchparam/pom.xml
+++ b/hapi-fhir-jpaserver-searchparam/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java
similarity index 51%
rename from hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java
rename to hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java
index 7611b7e1f23..39dd974dd83 100644
--- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/interceptor/model/ReadPartitionIdRequestDetails.java
@@ -2,7 +2,7 @@ package ca.uhn.fhir.interceptor.model;
/*-
* #%L
- * HAPI FHIR - Core Library
+ * HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -20,31 +20,52 @@ package ca.uhn.fhir.interceptor.model;
* #L%
*/
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
+import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+/**
+ * This is a model class used as a parameter for the interceptor pointcut
+ * {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_PARTITION_IDENTIFY_READ}.
+ */
public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
private final String myResourceType;
private final RestOperationTypeEnum myRestOperationType;
private final IIdType myReadResourceId;
- private final Object mySearchParams;
+ @Nullable
+ private final SearchParameterMap mySearchParams;
+ @Nullable
private final IBaseResource myConditionalTargetOrNull;
+ @Nullable
+ private final String mySearchUuid;
+ @Nullable
+ private final String myExtendedOperationName;
- public ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, Object theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull) {
+ private ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, @Nullable SearchParameterMap theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull, @Nullable String theSearchUuid, String theExtendedOperationName) {
myResourceType = theResourceType;
myRestOperationType = theRestOperationType;
myReadResourceId = theReadResourceId;
mySearchParams = theSearchParams;
myConditionalTargetOrNull = theConditionalTargetOrNull;
+ mySearchUuid = theSearchUuid;
+ myExtendedOperationName = theExtendedOperationName;
}
- public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) {
- RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
- return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null);
+ @Nullable
+ public String getExtendedOperationName() {
+ return myExtendedOperationName;
+ }
+
+ @Nullable
+ public String getSearchUuid() {
+ return mySearchUuid;
}
public String getResourceType() {
@@ -59,18 +80,45 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
return myReadResourceId;
}
- public Object getSearchParams() {
+ @Nullable
+ public SearchParameterMap getSearchParams() {
return mySearchParams;
}
+ @Nullable
public IBaseResource getConditionalTargetOrNull() {
return myConditionalTargetOrNull;
}
+ /**
+ * @param theId The resource ID (must include a resource type and ID)
+ */
+ public static ReadPartitionIdRequestDetails forRead(IIdType theId) {
+ assert isNotBlank(theId.getResourceType());
+ assert isNotBlank(theId.getIdPart());
+ return forRead(theId.getResourceType(), theId, false);
+ }
+ public static ReadPartitionIdRequestDetails forOperation(@Nullable String theResourceType, @Nullable IIdType theId, @Nonnull String theExtendedOperationName) {
+ RestOperationTypeEnum op;
+ if (theId != null) {
+ op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
+ } else if (theResourceType != null) {
+ op = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
+ } else {
+ op = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
+ }
- public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, Object theParams, IBaseResource theConditionalOperationTargetOrNull) {
- return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull);
+ return new ReadPartitionIdRequestDetails(theResourceType, op, null, null, null, null, theExtendedOperationName);
+ }
+
+ public static ReadPartitionIdRequestDetails forRead(String theResourceType, @Nonnull IIdType theId, boolean theIsVread) {
+ RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
+ return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null, null, null);
+ }
+
+ public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) {
+ return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull, null, null);
}
public static ReadPartitionIdRequestDetails forHistory(String theResourceType, IIdType theIdType) {
@@ -82,6 +130,10 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
} else {
restOperationTypeEnum = RestOperationTypeEnum.HISTORY_SYSTEM;
}
- return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null);
+ return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null, null, null);
+ }
+
+ public static ReadPartitionIdRequestDetails forSearchUuid(String theUuid) {
+ return new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.GET_PAGE, null, null, null, theUuid, null);
}
}
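Because this class is the parameter model for STORAGE_PARTITION_IDENTIFY_READ, interceptors can now key off the richer fields; a minimal sketch, with the tenant name invented for the example:

@Interceptor
public class ReadPartitionSelector {

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public RequestPartitionId selectPartition(ReadPartitionIdRequestDetails theDetails) {
		if (theDetails.getSearchUuid() != null) {
			// Paging requests built via forSearchUuid() carry no resource type,
			// only the UUID of the original search
			return RequestPartitionId.allPartitions();
		}
		return RequestPartitionId.fromPartitionName("TENANT-A");
	}
}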
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java
index b636978994a..e359c626b95 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/IResourceVersionSvc.java
@@ -28,8 +28,8 @@ import javax.annotation.Nonnull;
import java.util.List;
/**
- * This interface is used by the {@link IResourceChangeListenerCacheRefresher} to read resources matching the provided
- * search parameter map in the repository and compare them to caches stored in the {@link IResourceChangeListenerRegistry}.
+ * This interface is used by the {@literal IResourceChangeListenerCacheRefresher} to read resources matching the provided
+ * search parameter map in the repository and compare them to caches stored in the {@literal IResourceChangeListenerRegistry}.
*/
public interface IResourceVersionSvc {
@Nonnull
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java
index 98ae5bcfe07..624904341fb 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/cache/ResourceVersionMap.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.cache;
* #L%
*/
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
@@ -44,7 +44,7 @@ public class ResourceVersionMap {
private final Map myMap = new HashMap<>();
private ResourceVersionMap() {}
- public static ResourceVersionMap fromResourceTableEntities(List<ResourceTable> theEntities) {
+ public static ResourceVersionMap fromResourceTableEntities(List<? extends IBasePersistedResource> theEntities) {
ResourceVersionMap retval = new ResourceVersionMap();
theEntities.forEach(entity -> retval.add(entity.getIdDt()));
return retval;
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java
similarity index 81%
rename from hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java
rename to hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java
index 6f7c943b6c1..ea9daa36e77 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/partition/IRequestPartitionHelperSvc.java
@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.partition;
/*-
* #%L
- * HAPI FHIR Storage api
+ * HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
@@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.partition;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -35,18 +34,18 @@ import java.util.Set;
public interface IRequestPartitionHelperSvc {
@Nonnull
- RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, ReadPartitionIdRequestDetails theDetails);
+ RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails);
@Nonnull
- default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, IIdType theId) {
+ default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, @Nonnull IIdType theId) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(theResourceType, theId, theId.hasVersionIdPart());
- return determineReadPartitionForRequest(theRequest, theResourceType, details);
+ return determineReadPartitionForRequest(theRequest, details);
}
@Nonnull
default RequestPartitionId determineReadPartitionForRequestForSearchType(RequestDetails theRequest, String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, theParams, theConditionalOperationTargetOrNull);
- return determineReadPartitionForRequest(theRequest, theResourceType, details);
+ return determineReadPartitionForRequest(theRequest, details);
}
RequestPartitionId determineGenericPartitionForRequest(RequestDetails theRequestDetails);
@@ -54,18 +53,16 @@ public interface IRequestPartitionHelperSvc {
@Nonnull
default RequestPartitionId determineReadPartitionForRequestForHistory(RequestDetails theRequest, String theResourceType, IIdType theIdType) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(theResourceType, theIdType);
- return determineReadPartitionForRequest(theRequest, theResourceType, details);
+ return determineReadPartitionForRequest(theRequest, details);
}
@Nonnull
- default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId){}
+ default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
+ }
@Nonnull
RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType);
- @Nonnull
- PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId);
-
@Nonnull
Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId);
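Callers now funnel everything through the two-argument determineReadPartitionForRequest(). A sketch of the new paging path, assuming an injected myRequestPartitionHelperSvc field and an in-scope search UUID:

ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchUuid(theSearchUuid);
RequestPartitionId partitionId =
		myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);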
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
index e11d1b787d9..a9e5a36610b 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java
@@ -121,8 +121,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
private BaseRuntimeChildDefinition myRangeHighValueChild;
private BaseRuntimeChildDefinition myAddressLineValueChild;
private BaseRuntimeChildDefinition myAddressCityValueChild;
- private BaseRuntimeChildDefinition myAddressStateValueChild;
private BaseRuntimeChildDefinition myAddressDistrictValueChild;
+ private BaseRuntimeChildDefinition myAddressStateValueChild;
private BaseRuntimeChildDefinition myAddressCountryValueChild;
private BaseRuntimeChildDefinition myAddressPostalCodeValueChild;
private BaseRuntimeChildDefinition myCapabilityStatementRestSecurityServiceValueChild;
@@ -1945,6 +1945,7 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
IPrimitiveType nextBaseDateTime = (IPrimitiveType) theValue;
if (nextBaseDateTime.getValue() != null) {
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString());
+ ourLog.trace("DateExtractor - extracted {} for {}", nextBaseDateTime, theSearchParam.getName());
theParams.add(myIndexedSearchParamDate);
}
}
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java
new file mode 100644
index 00000000000..78d288e67b8
--- /dev/null
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/CrossPartitionReferenceDetails.java
@@ -0,0 +1,78 @@
+package ca.uhn.fhir.jpa.searchparam.extractor;
+
+/*-
+ * #%L
+ * HAPI FHIR Search Parameters
+ * %%
+ * Copyright (C) 2014 - 2023 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
+
+import javax.annotation.Nonnull;
+
+public class CrossPartitionReferenceDetails {
+
+ @Nonnull
+ private final RequestPartitionId mySourceResourcePartitionId;
+ @Nonnull
+ private final PathAndRef myPathAndRef;
+ @Nonnull
+ private final RequestDetails myRequestDetails;
+ @Nonnull
+ private final TransactionDetails myTransactionDetails;
+ @Nonnull
+ private final String mySourceResourceName;
+
+ /**
+ * Constructor
+ */
+ public CrossPartitionReferenceDetails(@Nonnull RequestPartitionId theSourceResourcePartitionId, @Nonnull String theSourceResourceName, @Nonnull PathAndRef thePathAndRef, @Nonnull RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
+ mySourceResourcePartitionId = theSourceResourcePartitionId;
+ mySourceResourceName = theSourceResourceName;
+ myPathAndRef = thePathAndRef;
+ myRequestDetails = theRequestDetails;
+ myTransactionDetails = theTransactionDetails;
+ }
+
+ @Nonnull
+ public String getSourceResourceName() {
+ return mySourceResourceName;
+ }
+
+ @Nonnull
+ public RequestDetails getRequestDetails() {
+ return myRequestDetails;
+ }
+
+ @Nonnull
+ public TransactionDetails getTransactionDetails() {
+ return myTransactionDetails;
+ }
+
+ @Nonnull
+ public RequestPartitionId getSourceResourcePartitionId() {
+ return mySourceResourcePartitionId;
+ }
+
+ @Nonnull
+ public PathAndRef getPathAndRef() {
+ return myPathAndRef;
+ }
+
+}
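This DTO is what the new JPA_RESOLVE_CROSS_PARTITION_REFERENCE pointcut receives (see SearchParamExtractorService below). A minimal sketch of a consuming interceptor; the myShardLookupSvc collaborator and its lookup() method are hypothetical:

@Interceptor
public class CrossPartitionReferenceResolver {

	// Hypothetical collaborator that can locate the target resource in the correct partition
	private final MyShardLookupSvc myShardLookupSvc = new MyShardLookupSvc();

	@Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
	public IResourceLookup<JpaPid> resolveReference(CrossPartitionReferenceDetails theDetails) {
		RequestPartitionId sourcePartition = theDetails.getSourceResourcePartitionId();
		String path = theDetails.getPathAndRef().getPath();
		return myShardLookupSvc.lookup(sourcePartition, path, theDetails.getRequestDetails());
	}
}

The hook's return value is cast to IResourceLookup in resolveTargetAndCreateResourceLinkOrReturnNull(), so returning null simply means the reference could not be resolved.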
diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
index 97aaa45408f..8bdafc19604 100644
--- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
+++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorService.java
@@ -31,8 +31,8 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
-import ca.uhn.fhir.jpa.model.entity.BasePartitionable;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.BasePartitionable;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.IResourceIndexComboSearchParameter;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;
@@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
@@ -71,8 +72,8 @@ import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
-import java.util.Set;
import java.util.Optional;
+import java.util.Set;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -93,6 +94,8 @@ public class SearchParamExtractorService {
private PartitionSettings myPartitionSettings;
@Autowired(required = false)
private IResourceLinkResolver myResourceLinkResolver;
+ @Autowired
+ private IRequestPartitionHelperSvc myPartitionHelperSvc;
@VisibleForTesting
public void setSearchParamExtractor(ISearchParamExtractor theSearchParamExtractor) {
@@ -436,7 +439,7 @@ public class SearchParamExtractorService {
* one we already have.
*/
Optional optionalResourceLink = findMatchingResourceLink(thePathAndRef, theExistingParams.getResourceLinks());
- if(optionalResourceLink.isPresent()){
+ if (optionalResourceLink.isPresent()) {
resourceLink = optionalResourceLink.get();
} else {
resourceLink = resolveTargetAndCreateResourceLinkOrReturnNull(theRequestPartitionId, theSourceResourceName, thePathAndRef, theEntity, transactionDate, nextId, theRequest, theTransactionDetails);
@@ -480,7 +483,7 @@ public class SearchParamExtractorService {
boolean hasMatchingResourceVersion = myContext.getParserOptions().isStripVersionsFromReferences() || referenceElement.getVersionIdPartAsLong() == null || referenceElement.getVersionIdPartAsLong().equals(resourceLink.getTargetResourceVersion());
- if( hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) {
+ if (hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) {
return Optional.of(resourceLink);
}
}
@@ -567,19 +570,33 @@ public class SearchParamExtractorService {
* target any more times than we have to.
*/
- RequestPartitionId targetRequestPartitionId = theRequestPartitionId;
- if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) {
- targetRequestPartitionId = RequestPartitionId.allPartitions();
+ IResourceLookup<JpaPid> targetResource;
+ if (myPartitionSettings.isPartitioningEnabled()) {
+ if (myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) {
+
+ // Interceptor: Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE
+ if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) {
+ CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
+ HookParams params = new HookParams(referenceDetails);
+ targetResource = (IResourceLookup<JpaPid>) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params);
+ } else {
+ targetResource = myResourceLinkResolver.findTargetResource(RequestPartitionId.allPartitions(), theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
+ }
+
+ } else {
+ targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
+ }
+ } else {
+ targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
}
- IResourceLookup targetResource = myResourceLinkResolver.findTargetResource(targetRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
if (targetResource == null) {
return null;
}
String targetResourceType = targetResource.getResourceType();
- Long targetResourcePid = ((JpaPid) targetResource.getPersistentId()).getId();
+ Long targetResourcePid = targetResource.getPersistentId().getId();
String targetResourceIdPart = theNextId.getIdPart();
Long targetVersion = theNextId.getVersionIdPartAsLong();
return ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, targetResourceType, targetResourcePid, targetResourceIdPart, theUpdateTime, targetVersion);
@@ -597,8 +614,8 @@ public class SearchParamExtractorService {
for (IResourceIndexComboSearchParameter next : theParams) {
if (next.getResource() == null) {
next.setResource(theResourceTable);
- if (next instanceof BasePartitionable){
- ((BasePartitionable)next).setPartitionId(theResourceTable.getPartitionId());
+ if (next instanceof BasePartitionable) {
+ ((BasePartitionable) next).setPartitionId(theResourceTable.getPartitionId());
}
}
}
@@ -671,14 +688,16 @@ public class SearchParamExtractorService {
}
// If extraction generated any warnings, broadcast an error
- for (String next : theSearchParamSet.getWarnings()) {
- StorageProcessingMessage messageHolder = new StorageProcessingMessage();
- messageHolder.setMessage(next);
- HookParams params = new HookParams()
- .add(RequestDetails.class, theRequestDetails)
- .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
- .add(StorageProcessingMessage.class, messageHolder);
- CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
+ if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) {
+ for (String next : theSearchParamSet.getWarnings()) {
+ StorageProcessingMessage messageHolder = new StorageProcessingMessage();
+ messageHolder.setMessage(next);
+ HookParams params = new HookParams()
+ .add(RequestDetails.class, theRequestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
+ .add(StorageProcessingMessage.class, messageHolder);
+ CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
+ }
}
}
}
diff --git a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java
index 36fe6d35eed..5472ff8d328 100644
--- a/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java
+++ b/hapi-fhir-jpaserver-searchparam/src/test/java/ca/uhn/fhir/jpa/searchparam/extractor/SearchParamExtractorServiceTest.java
@@ -36,9 +36,11 @@ public class SearchParamExtractorServiceTest {
searchParamSet.addWarning("help i'm a bug");
searchParamSet.addWarning("Spiff");
+ when(myJpaInterceptorBroadcaster.hasHooks(any())).thenReturn(true);
when(myJpaInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true);
- SearchParamExtractorService.handleWarnings(new ServletRequestDetails(myRequestInterceptorBroadcaster), myJpaInterceptorBroadcaster, searchParamSet);
+ ServletRequestDetails requestDetails = new ServletRequestDetails(myRequestInterceptorBroadcaster);
+ SearchParamExtractorService.handleWarnings(requestDetails, myJpaInterceptorBroadcaster, searchParamSet);
verify(myJpaInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
verify(myRequestInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
diff --git a/hapi-fhir-jpaserver-subscription/pom.xml b/hapi-fhir-jpaserver-subscription/pom.xml
index 508e1d29417..ac1c831d20d 100644
--- a/hapi-fhir-jpaserver-subscription/pom.xml
+++ b/hapi-fhir-jpaserver-subscription/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java
index 52bfbd4e7ec..3481c0b49f8 100644
--- a/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java
+++ b/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/triggering/SubscriptionTriggeringSvcImpl.java
@@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.api.svc.ISearchSvc;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
@@ -103,6 +104,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
private MatchUrlService myMatchUrlService;
@Autowired
private IResourceModifiedConsumer myResourceModifiedConsumer;
+ @Autowired
+ private HapiTransactionService myTransactionService;
private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT;
private ExecutorService myExecutorService;
@@ -150,8 +153,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails();
jobDetails.setJobId(UUID.randomUUID().toString());
- jobDetails.setRemainingResourceIds(resourceIds.stream().map(t -> t.getValue()).collect(Collectors.toList()));
- jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t -> t.getValue()).collect(Collectors.toList()));
+ jobDetails.setRemainingResourceIds(resourceIds.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
+ jobDetails.setRemainingSearchUrls(searchUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
if (theSubscriptionId != null) {
jobDetails.setSubscriptionId(theSubscriptionId.getIdPart());
}
@@ -329,12 +332,17 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(theJobDetails.getCurrentSearchResourceType());
int maxQuerySize = myMaxSubmitPerPass - totalSubmitted;
- int toIndex = fromIndex + maxQuerySize;
+ int toIndex;
if (theJobDetails.getCurrentSearchCount() != null) {
- toIndex = Math.min(toIndex, theJobDetails.getCurrentSearchCount());
+ toIndex = Math.min(fromIndex + maxQuerySize, theJobDetails.getCurrentSearchCount());
+ } else {
+ toIndex = fromIndex + maxQuerySize;
}
+
ourLog.info("Triggering job[{}] search {} requesting resources {} - {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex);
- List resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null);
+ List<IResourcePersistentId<?>> resourceIds;
+ RequestPartitionId requestPartitionId = RequestPartitionId.allPartitions();
+ resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null, requestPartitionId);
ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), resourceIds.size());
int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex();
diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java
index 975b78a6565..e16eca02d8a 100644
--- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java
+++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/match/matcher/matching/DaoSubscriptionMatcherTest.java
@@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;
@@ -76,6 +77,11 @@ public class DaoSubscriptionMatcherTest {
return mock(IResourceVersionSvc.class, RETURNS_DEEP_STUBS);
}
+ @Bean
+ public IRequestPartitionHelperSvc requestPartitionHelperSvc() {
+ return mock(IRequestPartitionHelperSvc.class);
+ }
+
}
}
diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java
index de4f081da57..0b83e0cbe57 100644
--- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java
+++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/BaseSubscriptionTest.java
@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.subscription.module;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory;
@@ -25,6 +26,8 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import java.util.Collections;
+import static org.mockito.Mockito.mock;
+
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
SearchParamConfig.class,
@@ -84,6 +87,11 @@ public abstract class BaseSubscriptionTest {
return new InterceptorService();
}
+ @Bean
+ public IRequestPartitionHelperSvc requestPartitionHelperSvc() {
+ return mock(IRequestPartitionHelperSvc.class);
+ }
+
@Bean
// Default implementation returns the name unchanged
public IChannelNamer channelNamer() {
diff --git a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java
index 73e839509c2..fecdfdceb98 100644
--- a/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java
+++ b/hapi-fhir-jpaserver-subscription/src/test/java/ca/uhn/fhir/jpa/subscription/module/subscriber/SubscriptionMatchingSubscriberTest.java
@@ -4,6 +4,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchingSubscriber;
@@ -20,14 +21,13 @@ import com.google.common.collect.Lists;
import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Subscription;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.Mockito;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.test.util.ReflectionTestUtils;
import java.util.Collections;
@@ -46,7 +46,6 @@ import static org.mockito.Mockito.when;
* Tests copied from jpa.subscription.resthook.RestHookTestDstu3Test
*/
public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscribableChannelDstu3Test {
- private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionMatchingSubscriberTest.class);
private final IFhirResourceDao myMockSubscriptionDao = Mockito.mock(IFhirResourceDao.class);
@BeforeEach
@@ -55,6 +54,11 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
myDaoRegistry.register(myMockSubscriptionDao);
}
+ @AfterEach
+ public void afterEach() {
+ myStorageSettings.setCrossPartitionSubscriptionEnabled(new JpaStorageSettings().isCrossPartitionSubscriptionEnabled());
+ }
+
@Test
public void testRestHookSubscriptionApplicationFhirJson() throws Exception {
String payload = "application/fhir+json";
@@ -392,6 +396,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
ourObservationListener.awaitExpected();
}
+ private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) {
+ Subscription modifiedSubscription = subscription.copy();
+ // the original partition info was the request info, but we need the actual storage partition.
+ modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
+ when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
+ }
+
@Nested
public class TestDeleteMessages {
private final SubscriptionMatchingSubscriber subscriber = new SubscriptionMatchingSubscriber();
@@ -460,7 +471,7 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
eq(Pointcut.SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED), any(HookParams.class))).thenReturn(true);
when(message.getPayloadId(null)).thenReturn(new IdDt("Patient", 123L));
when(myNonDeleteCanonicalSubscription.getSendDeleteMessages()).thenReturn(false);
- when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription ,myActiveSubscription));
+ when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription, myActiveSubscription));
when(myActiveSubscription.getSubscription()).thenReturn(myCanonicalSubscription);
when(myActiveSubscription.getCriteria()).thenReturn(mySubscriptionCriteria);
when(myActiveSubscription.getId()).thenReturn("Patient/123");
@@ -496,12 +507,4 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
verify(message, atLeastOnce()).getPayloadId(null);
}
}
-
-
- private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) {
- Subscription modifiedSubscription = subscription.copy();
- // the original partition info was the request info, but we need the actual storage partition.
- modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
- when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
- }
}
diff --git a/hapi-fhir-jpaserver-test-dstu2/pom.xml b/hapi-fhir-jpaserver-test-dstu2/pom.xml
index bb6c4785982..1451c2ddf25 100644
--- a/hapi-fhir-jpaserver-test-dstu2/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu2/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java
index 3d64fcad5d1..305b3a8af9d 100644
--- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java
+++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2ValidateTest.java
@@ -34,8 +34,10 @@ import static org.junit.jupiter.api.Assertions.fail;
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);
+ @Override
@BeforeEach
- public void before() {
+ public void before() throws Exception {
+ super.before();
myStorageSettings.setAllowExternalReferences(true);
}
diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java
index af9704df95b..0a73a7ae1a8 100644
--- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java
+++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/provider/ResourceProviderDstu2Test.java
@@ -2792,7 +2792,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
* Upload structurredef
*/
- String contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-sd-david.json"), "UTF-8");
+ String contents = ClasspathUtil.loadResource("/allergyintolerance-sd-david.json");
HttpEntityEnclosingRequestBase post = new HttpPut(myServerBase + "/StructureDefinition/ohAllergyIntolerance");
post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8")));
CloseableHttpResponse response = ourHttpClient.execute(post);
@@ -2809,7 +2809,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
* Validate
*/
- contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-david.json"), "UTF-8");
+ contents = ClasspathUtil.loadResource("/allergyintolerance-david.json");
post = new HttpPost(myServerBase + "/AllergyIntolerance/$validate?_pretty=true");
post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8")));
diff --git a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
index ab602016790..416dc6cf5b1 100644
--- a/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
+++ b/hapi-fhir-jpaserver-test-dstu2/src/test/java/ca/uhn/fhir/jpa/search/SearchCoordinatorSvcImplTest.java
@@ -80,11 +80,12 @@ import static org.mockito.Mockito.when;
@SuppressWarnings({"unchecked"})
@ExtendWith(MockitoExtension.class)
-public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
+public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);
-
- @Mock private SearchStrategyFactory mySearchStrategyFactory;
-
+ @Spy
+ protected FhirContext myContext = FhirContext.forDstu2Cached();
+ @Mock
+ private SearchStrategyFactory mySearchStrategyFactory;
@Mock
private ISearchCacheSvc mySearchCacheSvc;
@Mock
@@ -100,9 +101,6 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
private IRequestPartitionHelperSvc myPartitionHelperSvc;
@Mock
private ISynchronousSearchSvc mySynchronousSearchSvc;
- @Spy
- protected FhirContext myContext = FhirContext.forDstu2Cached();
-
@Spy
private ExceptionService myExceptionSvc = new ExceptionService(myContext);
@@ -118,6 +116,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
@BeforeEach
public void before() {
HapiSystemProperties.enableUnitTestCaptureStack();
+ HapiSystemProperties.enableUnitTestMode();
myCurrentSearch = null;
@@ -178,7 +177,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
assertEquals(allResults.size(), oldResults.size());
allResults.addAll(newResults);
return null;
- }).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList());
+ }).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList(), any(), any());
SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));
@@ -233,7 +232,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
});
try {
- mySvc.getResources("1234-5678", 0, 100, null);
+ mySvc.getResources("1234-5678", 0, 100, null, null);
fail();
} catch (ResourceGoneException e) {
assertEquals("Search ID \"1234-5678\" does not exist and may have expired", e.getMessage());
@@ -255,7 +254,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
});
try {
- mySvc.getResources("1234-5678", 0, 100, null);
+ mySvc.getResources("1234-5678", 0, 100, null, null);
fail();
} catch (InternalErrorException e) {
assertThat(e.getMessage(), containsString("Request timed out"));
@@ -295,15 +294,16 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
}
private void initAsyncSearches() {
- when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class))).thenAnswer(t->{
+ when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class), nullable(RequestPartitionId.class))).thenAnswer(t -> {
RequestDetails requestDetails = t.getArgument(0, RequestDetails.class);
Search search = t.getArgument(1, Search.class);
SearchTask searchTask = t.getArgument(2, SearchTask.class);
ISearchBuilder searchBuilder = t.getArgument(3, ISearchBuilder.class);
- PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails);
+ PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails, null);
retVal.setStorageSettingsForUnitTest(new JpaStorageSettings());
retVal.setTxServiceForUnitTest(myTransactionService);
retVal.setSearchCoordinatorSvcForUnitTest(mySvc);
+ retVal.setRequestPartitionHelperSvcForUnitTest(myPartitionHelperSvc);
retVal.setContext(myContext);
return retVal;
});
@@ -333,7 +333,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
try {
assertNotNull(searchId);
ourLog.info("About to pull the first resource");
- List resources = mySvc.getResources(searchId, 0, 1, null);
+ List resources = mySvc.getResources(searchId, 0, 1, null, null);
ourLog.info("Done pulling the first resource");
assertEquals(1, resources.size());
} finally {
@@ -342,14 +342,14 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
};
new Thread(taskStarter).start();
- await().until(()->iter.getCountReturned() >= 3);
+ await().until(() -> iter.getCountReturned() >= 3);
ourLog.info("About to cancel all searches");
mySvc.cancelAllActiveSearches();
ourLog.info("Done cancelling all searches");
try {
- mySvc.getResources(searchId, 0, 1, null);
+ mySvc.getResources(searchId, 0, 1, null, null);
} catch (ResourceGoneException e) {
// good
}
@@ -373,9 +373,9 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
List pids = createPidSequence(800);
IResultIterator iter = new SlowIterator(pids.iterator(), 2);
when(mySearchBuilder.createQuery(same(params), any(), any(), nullable(RequestPartitionId.class))).thenReturn(iter);
- when(mySearchCacheSvc.save(any())).thenAnswer(t ->{
+ when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
ourLog.info("Saving search");
- return t.getArgument( 0, Search.class);
+ return t.getArgument(0, Search.class);
});
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
@@ -448,6 +448,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setStatus(SearchStatusEnum.LOADING);
+ search.setSearchParameterMap(new SearchParameterMap());
when(mySearchCacheSvc.fetchByUuid(eq(uuid))).thenReturn(Optional.of(search));
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());
@@ -462,7 +463,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
// ignore
}
- when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt())).thenAnswer(theInvocation -> {
+ when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt(), any(), any())).thenAnswer(theInvocation -> {
ArrayList results = new ArrayList<>();
for (long i = theInvocation.getArgument(1, Integer.class); i < theInvocation.getArgument(2, Integer.class); i++) {
Long nextPid = i + 10L;
@@ -492,6 +493,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory);
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
provider.setStorageSettingsForUnitTest(new JpaStorageSettings());
+ provider.setRequestPartitionId(RequestPartitionId.defaultPartition());
resources = provider.getResources(20, 40);
assertEquals(20, resources.size());
assertEquals("30", resources.get(0).getIdElement().getValueAsString());
@@ -511,6 +513,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
provider.setDaoRegistryForUnitTest(myDaoRegistry);
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
provider.setStorageSettingsForUnitTest(new JpaStorageSettings());
+ provider.setRequestPartitionId(RequestPartitionId.defaultPartition());
return provider;
}
@@ -566,12 +569,12 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setTotalCount(100);
when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search));
- when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenReturn(null);
+ when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt(), any(), any())).thenReturn(null);
try {
- mySvc.getResources("0000-1111", 0, 10, null);
+ mySvc.getResources("0000-1111", 0, 10, null, null);
fail();
- } catch (ResourceGoneException e) {
+ } catch (ResourceGoneException e) {
assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage());
}
@@ -593,27 +596,70 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setSearchParameterMap(new SearchParameterMap());
when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search));
- when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{
+ when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t -> {
search.setStatus(SearchStatusEnum.LOADING);
return Optional.of(search);
});
mockSearchTask();
- when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(null);
+ when(mySearchResultCacheSvc.fetchAllResultPids(any(), any(), any())).thenReturn(null);
try {
- mySvc.getResources("0000-1111", 0, 10, null);
+ mySvc.getResources("0000-1111", 0, 10, null, null);
fail();
- } catch (ResourceGoneException e) {
+ } catch (ResourceGoneException e) {
assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage());
}
}
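+ // Stubs the bean factory so SearchTask / SearchContinuationTask are built directly
+ // from this test's mocks instead of being resolved from the Spring context.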
+ private void mockSearchTask() {
+ IPagingProvider pagingProvider = mock(IPagingProvider.class);
+ lenient().when(pagingProvider.getMaximumPageSize())
+ .thenReturn(500);
+ when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class)))
+ .thenAnswer(invocation -> {
+ String type = invocation.getArgument(0);
+ switch (type) {
+ case SearchConfig.SEARCH_TASK -> {
+ return new SearchTask(
+ invocation.getArgument(1),
+ myTransactionService,
+ ourCtx,
+ myInterceptorBroadcaster,
+ mySearchBuilderFactory,
+ mySearchResultCacheSvc,
+ myStorageSettings,
+ mySearchCacheSvc,
+ pagingProvider
+ );
+ }
+ case SearchConfig.CONTINUE_TASK -> {
+ return new SearchContinuationTask(
+ invocation.getArgument(1),
+ myTransactionService,
+ ourCtx,
+ myInterceptorBroadcaster,
+ mySearchBuilderFactory,
+ mySearchResultCacheSvc,
+ myStorageSettings,
+ mySearchCacheSvc,
+ pagingProvider,
+ myExceptionSvc
+ );
+ }
+ default -> {
+ fail("Invalid bean type: " + type);
+ return null;
+ }
+ }
+ });
+ }
+
public static class FailAfterNIterator extends BaseIterator implements IResultIterator {
- private int myCount;
private final IResultIterator myWrap;
+ private int myCount;
FailAfterNIterator(IResultIterator theWrap, int theCount) {
myWrap = theWrap;
@@ -738,45 +784,4 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
// nothing
}
}
-
-
- private void mockSearchTask() {
- IPagingProvider pagingProvider = mock(IPagingProvider.class);
- lenient().when(pagingProvider.getMaximumPageSize())
- .thenReturn(500);
- when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class)))
- .thenAnswer(invocation -> {
- String type = invocation.getArgument(0);
- switch (type) {
- case SearchConfig.SEARCH_TASK:
- return new SearchTask(
- invocation.getArgument(1),
- myTransactionService,
- ourCtx,
- myInterceptorBroadcaster,
- mySearchBuilderFactory,
- mySearchResultCacheSvc,
- myStorageSettings,
- mySearchCacheSvc,
- pagingProvider
- );
- case SearchConfig.CONTINUE_TASK:
- return new SearchContinuationTask(
- invocation.getArgument(1),
- myTransactionService,
- ourCtx,
- myInterceptorBroadcaster,
- mySearchBuilderFactory,
- mySearchResultCacheSvc,
- myStorageSettings,
- mySearchCacheSvc,
- pagingProvider,
- myExceptionSvc
- );
- default:
- fail("Invalid bean type: " + type);
- return null;
- }
- });
- }
}
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/resources/logback-test.xml b/hapi-fhir-jpaserver-test-dstu2/src/test/resources/logback-test.xml
similarity index 100%
rename from hapi-fhir-jpaserver-test-utilities/src/main/resources/logback-test.xml
rename to hapi-fhir-jpaserver-test-dstu2/src/test/resources/logback-test.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/pom.xml b/hapi-fhir-jpaserver-test-dstu3/pom.xml
index 62099e0b239..f2e29bf9cd3 100644
--- a/hapi-fhir-jpaserver-test-dstu3/pom.xml
+++ b/hapi-fhir-jpaserver-test-dstu3/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
index 71f568ec1d2..09e81fa0351 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3Test.java
@@ -2553,17 +2553,16 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
p.addName().setFamily("testMetaAddInvalid");
IIdType id = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless();
- //@formatter:off
- String input = "\n" +
- " \n" +
- " \n" +
- " \n" +
- " \n" +
- " \n" +
- "
\n" +
- " \n" +
- "";
- //@formatter:on
+ String input = """
+
+
+
+
+
+
+
+
+ """;
HttpPost post = new HttpPost(myServerBase + "/Patient/" + id.getIdPart() + "/$meta-add");
post.setEntity(new StringEntity(input, ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java
index d5e38616652..888cd3b2497 100644
--- a/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java
@@ -103,7 +103,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v1, null);
theCodeSystem.setId("CodeSystem/cs2");
theCodeSystem.setVersion("2");
@@ -116,7 +116,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v2, null);
}
@@ -126,13 +126,13 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
valueSet.setId("ValueSet/vs1");
valueSet.getCompose().getInclude().get(0).setVersion("1");
myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId();
+ myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getIdDt().getIdPartAsLong();
valueSet.setVersion("2");
valueSet.setId("ValueSet/vs2");
valueSet.getCompose().getInclude().get(0).setVersion("2");
myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId();
+ myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getIdDt().getIdPartAsLong();
}
diff --git a/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml
new file mode 100644
index 00000000000..dcada7fec76
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-dstu3/src/test/resources/logback-test.xml
@@ -0,0 +1,46 @@
+
+
+
+ %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-test-r4/pom.xml b/hapi-fhir-jpaserver-test-r4/pom.xml
index cb74e3b903a..fac4daeeba7 100644
--- a/hapi-fhir-jpaserver-test-r4/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
index 1e7d48945d6..8b40f551501 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/batch2/Batch2CoordinatorIT.java
@@ -88,10 +88,14 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
return RunOutcome.SUCCESS;
}
+ @Override
@BeforeEach
- public void before() {
+ public void before() throws Exception {
+ super.before();
+
myCompletionHandler = details -> {};
myWorkChannel = (LinkedBlockingChannel) myChannelFactory.getOrCreateReceiver(CHANNEL_NAME, JobWorkNotificationJsonMessage.class, new ChannelConsumerSettings());
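+ // make sure job fast tracking is enabled for these tests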
+ myStorageSettings.setJobFastTrackingEnabled(true);
}
@AfterEach
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
index 8dda2c2020c..580981059a9 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/bulk/BulkDataExportProviderTest.java
@@ -17,9 +17,9 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.client.apache.ResourceEntity;
import ca.uhn.fhir.rest.server.HardcodedServerAddressStrategy;
-import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
-import ca.uhn.fhir.test.utilities.JettyUtil;
+import ca.uhn.fhir.test.utilities.HttpClientExtension;
+import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.base.Charsets;
@@ -28,20 +28,14 @@ import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
-import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
@@ -54,12 +48,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
-import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -89,53 +83,19 @@ public class BulkDataExportProviderTest {
private static final Logger ourLog = LoggerFactory.getLogger(BulkDataExportProviderTest.class);
private static final String GROUP_ID = "Group/G2401";
private static final String G_JOB_ID = "0000000-GGGGGG";
- private Server myServer;
@Spy
private final FhirContext myCtx = FhirContext.forR4Cached();
- private int myPort;
-
+ @RegisterExtension
+ private final HttpClientExtension myClient = new HttpClientExtension();
@Mock
private IBatch2JobRunner myJobRunner;
-
- private JpaStorageSettings myStorageSettings;
- private DaoRegistry myDaoRegistry;
- private CloseableHttpClient myClient;
-
@InjectMocks
private BulkDataExportProvider myProvider;
- private RestfulServer servlet;
-
- static Stream paramsProvider(){
- return Stream.of(
- Arguments.arguments(true),
- Arguments.arguments(false)
- );
- }
-
- @AfterEach
- public void after() throws Exception {
- JettyUtil.closeServer(myServer);
- myClient.close();
- }
-
- @BeforeEach
- public void start() throws Exception {
- myServer = new Server(0);
-
- ServletHandler proxyHandler = new ServletHandler();
- servlet = new RestfulServer(myCtx);
- servlet.registerProvider(myProvider);
- ServletHolder servletHolder = new ServletHolder(servlet);
- proxyHandler.addServletWithMapping(servletHolder, "/*");
- myServer.setHandler(proxyHandler);
- JettyUtil.startServer(myServer);
- myPort = JettyUtil.getPortForStartedServer(myServer);
-
- PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
- HttpClientBuilder builder = HttpClientBuilder.create();
- builder.setConnectionManager(connectionManager);
- myClient = builder.build();
- }
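+ // JUnit 5 extensions now manage the test server and HTTP client, replacing the Jetty
+ // server and pooled HttpClient previously built in start() and torn down in after()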
+ @RegisterExtension
+ private final RestfulServerExtension myServer = new RestfulServerExtension(myCtx)
+ .withServer(s -> s.registerProvider(myProvider));
+ private JpaStorageSettings myStorageSettings;
+ private DaoRegistry myDaoRegistry;
@BeforeEach
public void injectStorageSettings() {
@@ -147,9 +107,9 @@ public class BulkDataExportProviderTest {
}
public void startWithFixedBaseUrl() {
- String baseUrl = "http://localhost:" + myPort + "/fixedvalue";
+ String baseUrl = myServer.getBaseUrl() + "/fixedvalue";
HardcodedServerAddressStrategy hardcodedServerAddressStrategy = new HardcodedServerAddressStrategy(baseUrl);
- servlet.setServerAddressStrategy(hardcodedServerAddressStrategy);
+ myServer.withServer(s -> s.setServerAddressStrategy(hardcodedServerAddressStrategy));
}
private BulkExportParameters verifyJobStart() {
@@ -196,7 +156,7 @@ public class BulkDataExportProviderTest {
ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// test
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -205,7 +165,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters params = verifyJobStart();
@@ -223,7 +183,7 @@ public class BulkDataExportProviderTest {
.thenReturn(createJobStartResponse());
Parameters input = new Parameters();
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -243,7 +203,7 @@ public class BulkDataExportProviderTest {
InstantType now = InstantType.now();
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner")
+ "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString())
@@ -257,7 +217,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters params = verifyJobStart();
@@ -272,7 +232,7 @@ public class BulkDataExportProviderTest {
when(myJobRunner.startNewJob(any()))
.thenReturn(createJobStartResponse());
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient,EpisodeOfCare")
+ "&" + JpaConstants.PARAM_EXPORT_TYPE_FILTER + "=" + UrlUtil.escapeUrlParam("Patient?_id=P999999990")
@@ -286,7 +246,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters params = verifyJobStart();
@@ -309,7 +269,7 @@ public class BulkDataExportProviderTest {
.thenReturn(info);
// test
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -338,7 +298,7 @@ public class BulkDataExportProviderTest {
.thenReturn(info);
// call
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -382,7 +342,7 @@ public class BulkDataExportProviderTest {
.thenReturn(info);
// call
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -398,11 +358,11 @@ public class BulkDataExportProviderTest {
BulkExportResponseJson responseJson = JsonUtil.deserialize(responseContent, BulkExportResponseJson.class);
assertEquals(3, responseJson.getOutput().size());
assertEquals("Patient", responseJson.getOutput().get(0).getType());
- assertEquals("http://localhost:" + myPort + "/Binary/111", responseJson.getOutput().get(0).getUrl());
+ assertEquals(myServer.getBaseUrl() + "/Binary/111", responseJson.getOutput().get(0).getUrl());
assertEquals("Patient", responseJson.getOutput().get(1).getType());
- assertEquals("http://localhost:" + myPort + "/Binary/222", responseJson.getOutput().get(1).getUrl());
+ assertEquals(myServer.getBaseUrl() + "/Binary/222", responseJson.getOutput().get(1).getUrl());
assertEquals("Patient", responseJson.getOutput().get(2).getType());
- assertEquals("http://localhost:" + myPort + "/Binary/333", responseJson.getOutput().get(2).getUrl());
+ assertEquals(myServer.getBaseUrl() + "/Binary/333", responseJson.getOutput().get(2).getUrl());
}
}
@@ -427,7 +387,7 @@ public class BulkDataExportProviderTest {
.thenReturn(info);
// test
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -453,7 +413,7 @@ public class BulkDataExportProviderTest {
when(myJobRunner.getJobInfo(anyString()))
.thenThrow(new ResourceNotFoundException("Unknown job: AAA"));
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -464,13 +424,13 @@ public class BulkDataExportProviderTest {
assertEquals(404, response.getStatusLine().getStatusCode());
assertEquals(Constants.CT_FHIR_JSON_NEW, response.getEntity().getContentType().getValue().replaceAll(";.*", "").trim());
- assertThat(responseContent, containsString("\"diagnostics\":\"Unknown job: AAA\""));
+ assertThat(responseContent, containsString("\"diagnostics\": \"Unknown job: AAA\""));
}
}
/**
* Group export tests
- * See https://build.fhir.org/ig/HL7/us-bulk-data/
+ * See Bulk Data IG
*
* GET [fhir base]/Group/[id]/$export
*
@@ -496,7 +456,7 @@ public class BulkDataExportProviderTest {
ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -504,7 +464,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
// verify
@@ -525,7 +485,7 @@ public class BulkDataExportProviderTest {
InstantType now = InstantType.now();
- String url = "http://localhost:" + myPort + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + GROUP_ID + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Patient, Practitioner")
+ "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString())
@@ -541,7 +501,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + G_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters bp = verifyJobStart();
@@ -559,7 +519,7 @@ public class BulkDataExportProviderTest {
InstantType now = InstantType.now();
// manual construct
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("Immunization, Observation")
+ "&" + JpaConstants.PARAM_EXPORT_SINCE + "=" + UrlUtil.escapeUrlParam(now.getValueAsString());
@@ -567,44 +527,44 @@ public class BulkDataExportProviderTest {
String immunizationTypeFilter1 = "Immunization?patient.identifier=SC378274-MRN|009999997,SC378274-MRN|009999998,SC378274-MRN|009999999&date=2020-01-02";
String immunizationTypeFilter2 = "Immunization?patient=Patient/123";
String observationFilter1 = "Observation?subject=Patient/123&created=ge2020-01-01";
- StringBuilder multiValuedTypeFilterBuilder = new StringBuilder()
- .append("&")
- .append(JpaConstants.PARAM_EXPORT_TYPE_FILTER)
- .append("=")
- .append(UrlUtil.escapeUrlParam(immunizationTypeFilter1))
- .append(",")
- .append(UrlUtil.escapeUrlParam(immunizationTypeFilter2))
- .append(",")
- .append(UrlUtil.escapeUrlParam(observationFilter1));
+ String multiValuedTypeFilterBuilder = "&" +
+ JpaConstants.PARAM_EXPORT_TYPE_FILTER +
+ "=" +
+ UrlUtil.escapeUrlParam(immunizationTypeFilter1) +
+ "," +
+ UrlUtil.escapeUrlParam(immunizationTypeFilter2) +
+ "," +
+ UrlUtil.escapeUrlParam(observationFilter1);
- url += multiValuedTypeFilterBuilder.toString();
+ url += multiValuedTypeFilterBuilder;
// call
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
- myClient.execute(get);
-
- // verify
- BulkExportParameters bp = verifyJobStart();
- assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
+ try (CloseableHttpResponse ignored = myClient.execute(get)) {
+ // verify
+ BulkExportParameters bp = verifyJobStart();
+ assertThat(bp.getFilters(), containsInAnyOrder(immunizationTypeFilter1, immunizationTypeFilter2, observationFilter1));
+ }
}
@Test
public void testInitiateGroupExportWithInvalidResourceTypesFails() throws IOException {
// when
- String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON)
+ "&" + JpaConstants.PARAM_EXPORT_TYPE + "=" + UrlUtil.escapeUrlParam("StructureDefinition,Observation");
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
- CloseableHttpResponse execute = myClient.execute(get);
- String responseBody = IOUtils.toString(execute.getEntity().getContent());
+ try (CloseableHttpResponse execute = myClient.execute(get)) {
+ String responseBody = IOUtils.toString(execute.getEntity().getContent(), StandardCharsets.UTF_8);
- // verify
- assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
- assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients.")));
+ // verify
+ assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(400)));
+ assertThat(responseBody, is(containsString("Resource types [StructureDefinition] are invalid for this type of export, as they do not contain search parameters that refer to patients.")));
+ }
}
@Test
@@ -614,22 +574,23 @@ public class BulkDataExportProviderTest {
.thenReturn(createJobStartResponse());
// test
- String url = "http://localhost:" + myPort + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
+ String url = myServer.getBaseUrl() + "/" + "Group/123/" + JpaConstants.OPERATION_EXPORT
+ "?" + JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT + "=" + UrlUtil.escapeUrlParam(Constants.CT_FHIR_NDJSON);
HttpGet get = new HttpGet(url);
get.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
- CloseableHttpResponse execute = myClient.execute(get);
+ try (CloseableHttpResponse execute = myClient.execute(get)) {
- // verify
- assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202)));
- final BulkExportParameters bulkExportParameters = verifyJobStart();
+ // verify
+ assertThat(execute.getStatusLine().getStatusCode(), is(equalTo(202)));
+ final BulkExportParameters bulkExportParameters = verifyJobStart();
- assertAll(
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")),
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")),
- () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device"))
- );
+ assertAll(
+ () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Patient")),
+ () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Group")),
+ () -> assertTrue(bulkExportParameters.getResourceTypes().contains("Device"))
+ );
+ }
}
@Test
@@ -645,7 +606,7 @@ public class BulkDataExportProviderTest {
ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -654,7 +615,7 @@ public class BulkDataExportProviderTest {
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
// verify
@@ -674,7 +635,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/Patient/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -682,7 +643,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters bp = verifyJobStart();
@@ -707,7 +668,7 @@ public class BulkDataExportProviderTest {
ourLog.debug(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/Patient/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/Patient/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -715,7 +676,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
BulkExportParameters bp = verifyJobStart();
@@ -740,7 +701,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner"));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -749,7 +710,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
// verify
@@ -757,7 +718,6 @@ public class BulkDataExportProviderTest {
assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
}
-
@Test
public void testProvider_whenEnableBatchJobReuseIsFalse_startsNewJob() throws IOException {
// setup
@@ -775,7 +735,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Patient, Practitioner"));
// call
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
ourLog.info("Request: {}", post);
@@ -783,7 +743,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + A_JOB_ID, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
// verify
@@ -791,7 +751,6 @@ public class BulkDataExportProviderTest {
assertThat(parameters.isUseExistingJobsFirst(), is(equalTo(false)));
}
-
@Test
public void testProviderReturnsSameIdForSameJob() throws IOException {
// given
@@ -831,7 +790,7 @@ public class BulkDataExportProviderTest {
.thenReturn(result);
// call
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpDelete delete = new HttpDelete(url);
try (CloseableHttpResponse response = myClient.execute(delete)) {
@@ -860,7 +819,7 @@ public class BulkDataExportProviderTest {
.thenReturn(info);
// call
- String url = "http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
+ String url = myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" +
JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + A_JOB_ID;
HttpDelete delete = new HttpDelete(url);
try (CloseableHttpResponse response = myClient.execute(delete)) {
@@ -884,7 +843,7 @@ public class BulkDataExportProviderTest {
.thenReturn(createJobStartResponse());
// call
- final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s", myPort, JpaConstants.OPERATION_EXPORT));
+ final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s", myServer.getPort(), JpaConstants.OPERATION_EXPORT));
httpGet.addHeader("_outputFormat", Constants.CT_FHIR_NDJSON);
httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
@@ -892,7 +851,7 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myPort, A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myServer.getPort(), A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty());
}
@@ -907,14 +866,14 @@ public class BulkDataExportProviderTest {
.thenReturn(createJobStartResponse());
// call
- final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s?_outputFormat=%s", myPort, JpaConstants.OPERATION_EXPORT, Constants.CT_FHIR_NDJSON));
+ final HttpGet httpGet = new HttpGet(String.format("http://localhost:%s/%s?_outputFormat=%s", myServer.getPort(), JpaConstants.OPERATION_EXPORT, Constants.CT_FHIR_NDJSON));
httpGet.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
try (CloseableHttpResponse response = myClient.execute(httpGet)) {
assertAll(
() -> assertEquals(202, response.getStatusLine().getStatusCode()),
() -> assertEquals("Accepted", response.getStatusLine().getReasonPhrase()),
- () -> assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myPort, A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()),
+ () -> assertEquals(String.format("http://localhost:%s/$export-poll-status?_jobId=%s", myServer.getPort(), A_JOB_ID), response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue()),
() -> assertTrue(IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8).isEmpty())
);
}
@@ -933,7 +892,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, new StringType(jobId));
// Initiate Export Poll Status
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -962,7 +921,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID, new StringType(A_JOB_ID));
// Initiate Export Poll Status
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -980,7 +939,7 @@ public class BulkDataExportProviderTest {
input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(ca.uhn.fhir.rest.api.Constants.CT_FHIR_NDJSON));
// Initiate Export Poll Status
- HttpPost post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
+ HttpPost post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -992,7 +951,7 @@ public class BulkDataExportProviderTest {
private void callExportAndAssertJobId(Parameters input, String theExpectedJobId) throws IOException {
HttpPost post;
- post = new HttpPost("http://localhost:" + myPort + "/" + JpaConstants.OPERATION_EXPORT);
+ post = new HttpPost(myServer.getBaseUrl() + "/" + JpaConstants.OPERATION_EXPORT);
post.addHeader(Constants.HEADER_PREFER, Constants.HEADER_PREFER_RESPOND_ASYNC);
post.addHeader(Constants.HEADER_CACHE_CONTROL, Constants.CACHE_CONTROL_NO_CACHE);
post.setEntity(new ResourceEntity(myCtx, input));
@@ -1001,9 +960,16 @@ public class BulkDataExportProviderTest {
ourLog.info("Response: {}", response.toString());
assertEquals(202, response.getStatusLine().getStatusCode());
assertEquals("Accepted", response.getStatusLine().getReasonPhrase());
- assertEquals("http://localhost:" + myPort + "/$export-poll-status?_jobId=" + theExpectedJobId, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
+ assertEquals(myServer.getBaseUrl() + "/$export-poll-status?_jobId=" + theExpectedJobId, response.getFirstHeader(Constants.HEADER_CONTENT_LOCATION).getValue());
}
}
+ static Stream paramsProvider() {
+ return Stream.of(
+ Arguments.arguments(true),
+ Arguments.arguments(false)
+ );
+ }
+
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java
index 3d68e7f458d..b54610ea2ad 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/BasePartitioningR4Test.java
@@ -212,8 +212,10 @@ public abstract class BasePartitioningR4Test extends BaseJpaR4SystemTest {
throw new Exception();
} catch (Exception e) {
stack = StackTraceHelper.getStackAsString(e);
- int lastWantedNewLine = StringUtils.ordinalIndexOf(stack, "\n", 15);
- stack = stack.substring(0, lastWantedNewLine);
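+ // keep up to 25 stack frames (was 15) and guard against stacks with fewer newlines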
+ int lastWantedNewLine = StringUtils.ordinalIndexOf(stack, "\n", 25);
+ if (lastWantedNewLine != -1) {
+ stack = stack.substring(0, lastWantedNewLine);
+ }
}
RequestPartitionId retVal = myReadRequestPartitionIds.remove(0);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java
index e37ccc68aa3..1393eb1a0de 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ExternalReferenceTest.java
@@ -36,7 +36,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
myStorageSettings.setAllowExternalReferences(new JpaStorageSettings().isAllowExternalReferences());
myStorageSettings.setTreatBaseUrlsAsLocal(null);
}
-
+
@Test
public void testInternalReferenceBlockedByDefault() {
Patient p = new Patient();
@@ -55,7 +55,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
org.setId("FOO");
org.setName("Org Name");
myOrganizationDao.update(org, mySrd);
-
+
Patient p = new Patient();
p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO");
try {
@@ -78,7 +78,7 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
Patient p = new Patient();
p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO");
IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
-
+
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_ORGANIZATION, new ReferenceParam("http://example.com/base/Organization/FOO"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(pid.getValue()));
@@ -103,12 +103,12 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
Patient p = new Patient();
p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO");
IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
-
+
p = myPatientDao.read(pid, mySrd);
assertEquals("Organization/FOO", p.getManagingOrganization().getReference());
-
+
SearchParameterMap map;
-
+
map = new SearchParameterMap();
map.add(Patient.SP_ORGANIZATION, new ReferenceParam("http://example.com/base/Organization/FOO"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), contains(pid.getValue()));
@@ -117,11 +117,11 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
@Test
public void testSearchForInvalidLocalReference() {
SearchParameterMap map;
-
+
map = new SearchParameterMap();
map.add(Patient.SP_ORGANIZATION, new ReferenceParam("Organization/FOO"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty());
-
+
map = new SearchParameterMap();
map.add(Patient.SP_ORGANIZATION, new ReferenceParam("Organization/9999999999"));
assertThat(toUnqualifiedVersionlessIdValues(myPatientDao.search(map)), empty());
@@ -142,10 +142,10 @@ public class FhirResourceDaoR4ExternalReferenceTest extends BaseJpaR4Test {
Patient p = new Patient();
p.getManagingOrganization().setReference("http://example.com/base/Organization/FOO");
IIdType pid = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
-
+
p = myPatientDao.read(pid, mySrd);
assertEquals("Organization/FOO", p.getManagingOrganization().getReference());
-
+
SearchParameterMap map;
// Different base
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
index 5ebe63379f4..6cc69a71556 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchNoFtTest.java
@@ -2531,16 +2531,27 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
}
+ @Test
+ public void testFetchMultiplePages() {
+ create9Patients();
+
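+ // small prefetch thresholds so fetching the second page forces the saved search to load further results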
+ myStorageSettings.setSearchPreFetchThresholds(Lists.newArrayList(3, 6, 10));
+
+ IBundleProvider search = myPatientDao.search(new SearchParameterMap(), mySrd);
+ List resources = search.getResources(0, 3);
+ assertEquals(3, resources.size());
+
+ IBundleProvider search2 = myPagingProvider.retrieveResultList(mySrd, search.getUuid());
+ resources = search2.getResources(3, 99);
+ assertEquals(6, resources.size());
+ }
+
/**
* See #1174
*/
@Test
public void testSearchDateInSavedSearch() {
- for (int i = 1; i <= 9; i++) {
- Patient p1 = new Patient();
- p1.getBirthDateElement().setValueAsString("1980-01-0" + i);
- myPatientDao.create(p1).getId().toUnqualifiedVersionless().getValue();
- }
+ create9Patients();
myStorageSettings.setSearchPreFetchThresholds(Lists.newArrayList(3, 6, 10));
@@ -2578,6 +2589,14 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
}
}
+ private void create9Patients() {
+ for (int i = 1; i <= 9; i++) {
+ Patient p1 = new Patient();
+ p1.getBirthDateElement().setValueAsString("1980-01-0" + i);
+ myPatientDao.create(p1).getId().toUnqualifiedVersionless().getValue();
+ }
+ }
+
/**
* #222
*/
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
index ac107678214..048e242d700 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4TerminologyTest.java
@@ -71,8 +71,10 @@ public class FhirResourceDaoR4TerminologyTest extends BaseJpaR4Test {
TermReindexingSvcImpl.setForceSaveDeferredAlwaysForUnitTest(false);
}
+ @Override
@BeforeEach
- public void before() {
+ public void before() throws Exception {
+ super.before();
myStorageSettings.setMaximumExpansionSize(5000);
myCachingValidationSupport.invalidateCaches();
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java
index 2056bc76f0d..6b10d43f2a1 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4ValidateTest.java
@@ -1663,6 +1663,7 @@ public class FhirResourceDaoR4ValidateTest extends BaseJpaR4Test {
String rawResource = myFhirContext.newJsonParser().encodeResourceToString(params);
myPatientDao.validate(pat, null, rawResource, EncodingEnum.JSON, ValidationModeEnum.UPDATE, null, mySrd);
}
+
@Test
public void testValidateRawResourceForUpdateWithNoId() {
String methodName = "testValidateForUpdate";
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
index 7a3d8cc05e1..0787c9c250b 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirSystemDaoR4Test.java
@@ -563,7 +563,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
}
@Test
- public void testReindexing() throws InterruptedException {
+ public void testReindexing() {
Patient p = new Patient();
p.addName().setFamily("family");
final IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualified();
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java
index f4b32ac4d51..922ce524224 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/dao/r4/PartitioningSqlR4Test.java
@@ -769,6 +769,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Update that resource
addReadPartition(myPartitionId);
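+ // the update now also resolves a create partition, not just a read partition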
+ addCreatePartition(myPartitionId);
p = new Patient();
p.setActive(true);
p.addIdentifier().setValue("12345");
@@ -2751,7 +2752,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("About to start transaction");
- for (int i = 0; i < 28; i++) {
+ for (int i = 0; i < 40; i++) {
addCreatePartition(1, null);
}
@@ -2863,13 +2864,12 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
// Fetch history resource
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
- assertEquals(0, countMatches(searchSql, "PARTITION_ID"), searchSql);
+ assertEquals(1, countMatches(searchSql, "PARTITION_ID in"), searchSql);
// Fetch history resource
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
- assertEquals(0, countMatches(searchSql, "PARTITION_ID="), searchSql.replace(" ", "").toUpperCase());
- assertEquals(0, countMatches(searchSql, "PARTITION_IDIN"), searchSql.replace(" ", "").toUpperCase());
+ assertEquals(1, countMatches(searchSql, "PARTITION_ID in"), searchSql.replace(" ", "").toUpperCase());
}
@Test
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java
index a7ba89e62f7..e6abe1c9b89 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PartitioningInterceptorR4Test.java
@@ -39,7 +39,6 @@ import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
-import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import java.time.LocalDate;
import java.util.ArrayList;
@@ -79,8 +78,11 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
myInterceptorRegistry.unregisterAllInterceptors();
}
+ @Override
@BeforeEach
- public void before() throws ServletException {
+ public void before() throws Exception {
+ super.before();
+
myPartitionSettings.setPartitioningEnabled(true);
myPartitionInterceptor = new MyWriteInterceptor();
@@ -152,7 +154,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
sd.setUrl("http://foo");
myStructureDefinitionDao.create(sd, new ServletRequestDetails());
- runInTransaction(()->{
+ runInTransaction(() -> {
List resources = myResourceTableDao.findAll();
LocalDate expectedDate = LocalDate.of(2021, 2, 22);
assertEquals(1, resources.size());
@@ -169,7 +171,7 @@ public class PartitioningInterceptorR4Test extends BaseJpaR4SystemTest {
sd.setUrl("http://foo");
myStructureDefinitionDao.create(sd, new ServletRequestDetails());
- runInTransaction(()->{
+ runInTransaction(() -> {
List resources = myResourceTableDao.findAll();
assertEquals(1, resources.size());
assertEquals(null, resources.get(0).getPartitionId());
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java
index 2d4370ee64e..bbfcd43f45a 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptorTest.java
@@ -61,7 +61,7 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
@BeforeEach
public void before() throws Exception {
super.before();
- mySvc = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor);
+ mySvc = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings);
myForceOffsetSearchModeInterceptor = new ForceOffsetSearchModeInterceptor();
myInterceptorRegistry.registerInterceptor(mySvc);
@@ -342,8 +342,8 @@ public class PatientIdPartitionInterceptorTest extends BaseJpaR4SystemTest {
myCaptureQueriesListener.clear();
IBundleProvider outcome = myOrganizationDao.history(new IdType("Organization/C"), null, null, null, mySrd);
- assertEquals(2, outcome.size());
myCaptureQueriesListener.logSelectQueries();
+ assertEquals(2, outcome.size());
assertEquals(3, myCaptureQueriesListener.getSelectQueries().size());
assertThat(myCaptureQueriesListener.getSelectQueries().get(0).getSql(false, false), containsString("PARTITION_ID in "));
assertThat(myCaptureQueriesListener.getSelectQueries().get(1).getSql(false, false), containsString("PARTITION_ID="));
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
index 5effc5f8b66..583fb6ea597 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/JpaPackageCacheTest.java
@@ -79,7 +79,7 @@ public class JpaPackageCacheTest extends BaseJpaR4Test {
public void testSaveAndDeletePackagePartitionsEnabled() throws IOException {
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setDefaultPartitionId(1);
- PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor);
+ PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings);
myInterceptorService.registerInterceptor(patientIdPartitionInterceptor);
myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor);
try {
@@ -118,7 +118,7 @@ public class JpaPackageCacheTest extends BaseJpaR4Test {
myPartitionSettings.setPartitioningEnabled(true);
myPartitionSettings.setDefaultPartitionId(0);
myPartitionSettings.setUnnamedPartitionMode(true);
- PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor);
+ PatientIdPartitionInterceptor patientIdPartitionInterceptor = new PatientIdPartitionInterceptor(myFhirContext, mySearchParamExtractor, myPartitionSettings);
myInterceptorService.registerInterceptor(patientIdPartitionInterceptor);
myInterceptorService.registerInterceptor(myRequestTenantPartitionInterceptor);
try {
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
index 6277ee7a989..e8f1768d0c0 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/packages/NpmR4Test.java
@@ -14,10 +14,10 @@ import ca.uhn.fhir.jpa.model.entity.NpmPackageEntity;
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionEntity;
import ca.uhn.fhir.jpa.model.entity.NpmPackageVersionResourceEntity;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
@@ -80,6 +80,9 @@ public class NpmR4Test extends BaseJpaR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(NpmR4Test.class);
@Autowired
+ @Qualifier("myImplementationGuideDaoR4")
+ protected IFhirResourceDao<ImplementationGuide> myImplementationGuideDao;
+ @Autowired
private IHapiPackageCacheManager myPackageCacheManager;
@Autowired
private NpmJpaValidationSupport myNpmJpaValidationSupport;
@@ -95,12 +98,12 @@ public class NpmR4Test extends BaseJpaR4Test {
private IInterceptorService myInterceptorService;
@Autowired
private RequestTenantPartitionInterceptor myRequestTenantPartitionInterceptor;
- @Autowired
- @Qualifier("myImplementationGuideDaoR4")
- protected IFhirResourceDao<ImplementationGuide> myImplementationGuideDao;
+ @Override
@BeforeEach
public void before() throws Exception {
+ super.before();
+
JpaPackageCache jpaPackageCache = ProxyUtil.getSingletonTarget(myPackageCacheManager, JpaPackageCache.class);
myServer = new Server(0);
@@ -144,7 +147,7 @@ public class NpmR4Test extends BaseJpaR4Test {
.setFetchDependencies(true);
myPackageInstallerSvc.install(spec);
- runInTransaction(()->{
+ runInTransaction(() -> {
SearchParameterMap map = SearchParameterMap.newSynchronous(SearchParameter.SP_BASE, new TokenParam("NamingSystem"));
IBundleProvider outcome = mySearchParameterDao.search(map);
List<IBaseResource> resources = outcome.getResources(0, outcome.sizeOrThrowNpe());
@@ -345,24 +348,24 @@ public class NpmR4Test extends BaseJpaR4Test {
public void testNumericIdsInstalledWithNpmPrefix() throws Exception {
myStorageSettings.setAllowExternalReferences(true);
- // Load a copy of hl7.fhir.uv.shorthand-0.12.0, but with id set to 1 instead of "shorthand-code-system"
+ // Load a copy of hl7.fhir.uv.shorthand-0.12.0, but with id set to 1 instead of "shorthand-code-system"
byte[] bytes = ClasspathUtil.loadResourceAsByteArray("/packages/hl7.fhir.uv.shorthand-0.13.0.tgz");
- myFakeNpmServlet.responses.put("/hl7.fhir.uv.shorthand/0.13.0", bytes);
+ myFakeNpmServlet.responses.put("/hl7.fhir.uv.shorthand/0.13.0", bytes);
- PackageInstallationSpec spec = new PackageInstallationSpec().setName("hl7.fhir.uv.shorthand").setVersion("0.13.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
- PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec);
- // Be sure no further communication with the server
- JettyUtil.closeServer(myServer);
+ PackageInstallationSpec spec = new PackageInstallationSpec().setName("hl7.fhir.uv.shorthand").setVersion("0.13.0").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
+ PackageInstallOutcomeJson outcome = myPackageInstallerSvc.install(spec);
+ // Be sure no further communication with the server
+ JettyUtil.closeServer(myServer);
- // Search for the installed resource
- runInTransaction(() -> {
- SearchParameterMap map = SearchParameterMap.newSynchronous();
- map.add(StructureDefinition.SP_URL, new UriParam("http://hl7.org/fhir/uv/shorthand/CodeSystem/shorthand-code-system"));
- IBundleProvider result = myCodeSystemDao.search(map);
- assertEquals(1, result.sizeOrThrowNpe());
- IBaseResource resource = result.getResources(0, 1).get(0);
- assertEquals("CodeSystem/npm-1/_history/1", resource.getIdElement().toString());
- });
+ // Search for the installed resource
+ runInTransaction(() -> {
+ SearchParameterMap map = SearchParameterMap.newSynchronous();
+ map.add(StructureDefinition.SP_URL, new UriParam("http://hl7.org/fhir/uv/shorthand/CodeSystem/shorthand-code-system"));
+ IBundleProvider result = myCodeSystemDao.search(map);
+ assertEquals(1, result.sizeOrThrowNpe());
+ IBaseResource resource = result.getResources(0, 1).get(0);
+ assertEquals("CodeSystem/npm-1/_history/1", resource.getIdElement().toString());
+ });
}
@@ -786,7 +789,7 @@ public class NpmR4Test extends BaseJpaR4Test {
spSearch = myPractitionerRoleDao.search(map, new SystemRequestDetails());
assertEquals(1, spSearch.sizeOrThrowNpe());
-
+
// Install newer version
spec = new PackageInstallationSpec().setName("test-exchange.fhir.us.com").setVersion("2.1.2").setInstallMode(PackageInstallationSpec.InstallModeEnum.STORE_AND_INSTALL);
myPackageInstallerSvc.install(spec);
@@ -799,7 +802,7 @@ public class NpmR4Test extends BaseJpaR4Test {
assertEquals("2.2", sp.getVersion());
}
-
+
@Test
public void testLoadContents() throws IOException {
@@ -889,16 +892,16 @@ public class NpmR4Test extends BaseJpaR4Test {
map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/LogicalModel/Laborbefund"));
IBundleProvider result = myStructureDefinitionDao.search(map);
assertEquals(1, result.sizeOrThrowNpe());
- List<IBaseResource> resources = result.getResources(0,1);
- assertFalse(((StructureDefinition)resources.get(0)).hasSnapshot());
+ List<IBaseResource> resources = result.getResources(0, 1);
+ assertFalse(((StructureDefinition) resources.get(0)).hasSnapshot());
// Confirm that DiagnosticLab (a resource StructureDefinition with differential but no snapshot) was created with a generated snapshot.
map = SearchParameterMap.newSynchronous();
map.add(StructureDefinition.SP_URL, new UriParam("https://www.medizininformatik-initiative.de/fhir/core/modul-labor/StructureDefinition/DiagnosticReportLab"));
result = myStructureDefinitionDao.search(map);
assertEquals(1, result.sizeOrThrowNpe());
- resources = result.getResources(0,1);
- assertTrue(((StructureDefinition)resources.get(0)).hasSnapshot());
+ resources = result.getResources(0, 1);
+ assertTrue(((StructureDefinition) resources.get(0)).hasSnapshot());
});
}
@@ -963,5 +966,4 @@ public class NpmR4Test extends BaseJpaR4Test {
}
-
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java
index 6ce428f6d15..b30a1216b8d 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientEverythingR4Test.java
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.ClientProtocolException;
@@ -107,7 +108,7 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
Task task = new Task();
task.setStatus(Task.TaskStatus.COMPLETED);
task.getOwner().setReference(patId);
- taskId = myClient.create().resource(task).execute().getId().toUnqualifiedVersionless().getValue();
+ taskId = myClient.create().resource(task).execute().getId().toUnqualifiedVersionless().getValue();
Encounter wrongEnc1 = new Encounter();
wrongEnc1.setStatus(EncounterStatus.ARRIVED);
@@ -169,18 +170,18 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
*/
@Test
public void testEverythingReturnsCorrectResources() throws Exception {
-
+
Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON);
-
+
assertNull(bundle.getLink("next"));
-
+
Set<String> actual = new TreeSet<>();
for (BundleEntryComponent nextEntry : bundle.getEntry()) {
actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
}
-
+
ourLog.info("Found IDs: {}", actual);
-
+
assertThat(actual, hasItem(patId));
assertThat(actual, hasItem(encId1));
assertThat(actual, hasItem(encId2));
@@ -199,16 +200,16 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
myStorageSettings.setEverythingIncludesFetchPageSize(1);
Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=100", EncodingEnum.JSON);
-
+
assertNull(bundle.getLink("next"));
-
+
Set<String> actual = new TreeSet<>();
for (BundleEntryComponent nextEntry : bundle.getEntry()) {
actual.add(nextEntry.getResource().getIdElement().toUnqualifiedVersionless().getValue());
}
-
+
ourLog.info("Found IDs: {}", actual);
-
+
assertThat(actual, hasItem(patId));
assertThat(actual, hasItem(encId1));
assertThat(actual, hasItem(encId2));
@@ -217,19 +218,19 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
assertThat(actual, not(hasItem(myWrongPatId)));
assertThat(actual, not(hasItem(myWrongEnc1)));
}
-
+
/**
* See #674
*/
@Test
public void testEverythingPagesWithCorrectEncodingJson() throws Exception {
-
+
Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=json&_count=1", EncodingEnum.JSON);
-
+
assertNotNull(bundle.getLink("next").getUrl());
assertThat(bundle.getLink("next").getUrl(), containsString("_format=json"));
bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON);
-
+
assertNotNull(bundle.getLink("next").getUrl());
assertThat(bundle.getLink("next").getUrl(), containsString("_format=json"));
bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.JSON);
@@ -240,9 +241,9 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
*/
@Test
public void testEverythingPagesWithCorrectEncodingXml() throws Exception {
-
+
Bundle bundle = fetchBundle(myServerBase + "/" + patId + "/$everything?_format=xml&_count=1", EncodingEnum.XML);
-
+
assertNotNull(bundle.getLink("next").getUrl());
ourLog.info("Next link: {}", bundle.getLink("next").getUrl());
assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml"));
@@ -253,12 +254,13 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
assertThat(bundle.getLink("next").getUrl(), containsString("_format=xml"));
bundle = fetchBundle(bundle.getLink("next").getUrl(), EncodingEnum.XML);
}
+
@Test
//See https://github.com/hapifhir/hapi-fhir/issues/3215
public void testEverythingWithLargeResultSetDoesNotNpe() throws IOException {
for (int i = 0; i < 500; i++) {
Observation obs1 = new Observation();
- obs1.setSubject(new Reference( patId));
+ obs1.setSubject(new Reference(patId));
obs1.getCode().addCoding().setCode("CODE1");
obs1.setValue(new StringType("obsvalue1"));
myObservationDao.create(obs1, new SystemRequestDetails()).getId().toUnqualifiedVersionless();
@@ -283,7 +285,7 @@ public class PatientEverythingR4Test extends BaseResourceProviderR4Test {
} finally {
IOUtils.closeQuietly(resp);
}
-
+
return bundle;
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java
index 4b3bbdbf580..638e9a5f0db 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/PatientMemberMatchOperationR4Test.java
@@ -129,7 +129,7 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes
.setId("Patient/A123");
if (includeBeneficiaryIdentifier) {
member.setIdentifier(Collections.singletonList(new Identifier()
- .setSystem(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM).setValue(EXISTING_COVERAGE_PATIENT_IDENT_VALUE)));
+ .setSystem(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM).setValue(EXISTING_COVERAGE_PATIENT_IDENT_VALUE)));
}
member.setActive(true);
myClient.update().resource(member).execute();
@@ -248,6 +248,158 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes
validateResponseConsent(parametersResponse, myConsent);
}
+ @Test
+ public void testMemberMatchByCoverageIdentifier() throws Exception {
+ createCoverageWithBeneficiary(true, true);
+
+ Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
+ Parameters parametersResponse = performOperation(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters);
+
+ validateMemberPatient(parametersResponse);
+ validateNewCoverage(parametersResponse, newCoverage);
+ }
+
+ /**
+ * Validates that the second resource from the response is the same as the received coverage
+ */
+ private void validateNewCoverage(Parameters theResponse, Coverage theOriginalCoverage) {
+ List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_NEW_COVERAGE);
+ assertEquals(1, patientList.size());
+ Coverage respCoverage = (Coverage) theResponse.getParameter().get(1).getResource();
+
+ assertEquals("Coverage/" + theOriginalCoverage.getId(), respCoverage.getId());
+ assertEquals(theOriginalCoverage.getIdentifierFirstRep().getSystem(), respCoverage.getIdentifierFirstRep().getSystem());
+ assertEquals(theOriginalCoverage.getIdentifierFirstRep().getValue(), respCoverage.getIdentifierFirstRep().getValue());
+ }
+
+ private void validateConsentPatientAndPerformerRef(Patient thePatient, Consent theConsent) {
+ String patientRef = thePatient.getIdElement().toUnqualifiedVersionless().getValue();
+ assertEquals(patientRef, theConsent.getPatient().getReference());
+ assertEquals(patientRef, theConsent.getPerformer().get(0).getReference());
+ }
+
+ private void validateMemberPatient(Parameters response) {
+// parameter MemberPatient must have a new identifier with:
+// {
+// "use": "usual",
+// "type": {
+// "coding": [
+// {
+// "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+// "code": "UMB",
+// "display": "Member Number",
+// "userSelected": false
+// }
+// ],
+// "text": "Member Number"
+// },
+// "system": COVERAGE_PATIENT_IDENT_SYSTEM,
+// "value": COVERAGE_PATIENT_IDENT_VALUE
+// }
+ List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), response, PARAM_MEMBER_PATIENT);
+ assertEquals(1, patientList.size());
+ Patient resultPatient = (Patient) response.getParameter().get(0).getResource();
+
+ assertNotNull(resultPatient.getIdentifier());
+ assertEquals(1, resultPatient.getIdentifier().size());
+ Identifier addedIdentifier = resultPatient.getIdentifier().get(0);
+ assertEquals(Identifier.IdentifierUse.USUAL, addedIdentifier.getUse());
+ checkCoding(addedIdentifier.getType());
+ assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM, addedIdentifier.getSystem());
+ assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_VALUE, addedIdentifier.getValue());
+ }
+
+ @Test
+ public void testNoCoverageMatchFound() throws Exception {
+ Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
+ performOperationExpecting422(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters,
+ "Could not find coverage for member");
+ }
+
+ @Test
+ public void testConsentUpdatePatientAndPerformer() throws Exception {
+ createCoverageWithBeneficiary(true, true);
+ Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
+ Parameters parametersResponse = performOperation(myServerBase + ourQuery,
+ EncodingEnum.JSON, inputParameters);
+
+ Consent respConsent = validateResponseConsent(parametersResponse, myConsent);
+ validateConsentPatientAndPerformerRef(myPatient, respConsent);
+ }
+
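+ /** Builds the member-match input Parameters resource from the patient, old coverage, new coverage and consent. */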
+ private Parameters buildInputParameters(Patient thePatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) {
+ Parameters p = new Parameters();
+ ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_MEMBER_PATIENT, thePatient);
+ ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_OLD_COVERAGE, theOldCoverage);
+ ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_NEW_COVERAGE, theNewCoverage);
+ ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_CONSENT, theConsent);
+ return p;
+ }
+
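+ /** POSTs the given Parameters to the operation URL, asserts an HTTP 200 response and returns the parsed Parameters. */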
+ private Parameters performOperation(String theUrl,
+ EncodingEnum theEncoding, Parameters theInputParameters) throws Exception {
+
+ HttpPost post = new HttpPost(theUrl);
+ post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString());
+ post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters));
+ ourLog.info("Request: {}", post);
+ try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
+ String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
+ ourLog.info("Response: {}", responseString);
+ assertEquals(200, response.getStatusLine().getStatusCode());
+
+ return theEncoding.newParser(myFhirContext).parseResource(Parameters.class,
+ responseString);
+ }
+ }
+
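+ /** POSTs the given Parameters to the operation URL and asserts an HTTP 422 response containing the expected error message. */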
+ private void performOperationExpecting422(String theUrl, EncodingEnum theEncoding,
+ Parameters theInputParameters, String theExpectedErrorMsg) throws Exception {
+
+ HttpPost post = new HttpPost(theUrl);
+ post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString());
+ post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters));
+ ourLog.info("Request: {}", post);
+ try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
+ String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
+ ourLog.info("Response: {}", responseString);
+ assertEquals(422, response.getStatusLine().getStatusCode());
+ assertThat(responseString, containsString(theExpectedErrorMsg));
+ }
+ }
+
+ private void checkCoding(CodeableConcept theType) {
+ // must match:
+ // "coding": [
+ // {
+ // "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+ // "code": "MB",
+ // "display": "Member Number",
+ // "userSelected": false
+ // }
+ // * ]
+ Coding coding = theType.getCoding().get(0);
+ assertEquals("http://terminology.hl7.org/CodeSystem/v2-0203", coding.getSystem());
+ assertEquals("MB", coding.getCode());
+ assertEquals("Member Number", coding.getDisplay());
+ assertFalse(coding.getUserSelected());
+ }
+
+ /**
+ * Validates that the consent from the response is the same as the received consent, with an additional identifier and extension
+ */
+ private Consent validateResponseConsent(Parameters theResponse, Consent theOriginalConsent) {
+ List consentList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_CONSENT);
+ assertEquals(1, consentList.size());
+ Consent respConsent = (Consent) theResponse.getParameter().get(2).getResource();
+
+ assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getSystem(), respConsent.getScope().getCodingFirstRep().getSystem());
+ assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getCode(), respConsent.getScope().getCodingFirstRep().getCode());
+ assertEquals(myMemberMatcherR4Helper.CONSENT_IDENTIFIER_CODE_SYSTEM, respConsent.getIdentifier().get(0).getSystem());
+ assertNotNull(respConsent.getIdentifier().get(0).getValue());
+ return respConsent;
+ }
+
@Nested
public class ValidateParameterErrors {
private Patient ourPatient;
@@ -318,7 +470,7 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes
@Test
public void testMissingConsentPatientReference() throws Exception {
ourPatient.setName(Lists.newArrayList(new HumanName()
- .setUse(HumanName.NameUse.OFFICIAL).setFamily("Person")))
+ .setUse(HumanName.NameUse.OFFICIAL).setFamily("Person")))
.setBirthDateElement(new DateType("2020-01-01"));
Parameters inputParameters = buildInputParameters(ourPatient, ourOldCoverage, ourNewCoverage, ourConsent);
@@ -339,164 +491,4 @@ public class PatientMemberMatchOperationR4Test extends BaseResourceProviderR4Tes
}
}
-
- @Test
- public void testMemberMatchByCoverageIdentifier() throws Exception {
- createCoverageWithBeneficiary(true, true);
-
- Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
- Parameters parametersResponse = performOperation(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters);
-
- validateMemberPatient(parametersResponse);
- validateNewCoverage(parametersResponse, newCoverage);
- }
-
-
- /**
- * Validates that second resource from the response is same as the received coverage
- */
- private void validateNewCoverage(Parameters theResponse, Coverage theOriginalCoverage) {
- List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_NEW_COVERAGE);
- assertEquals(1, patientList.size());
- Coverage respCoverage = (Coverage) theResponse.getParameter().get(1).getResource();
-
- assertEquals("Coverage/" + theOriginalCoverage.getId(), respCoverage.getId());
- assertEquals(theOriginalCoverage.getIdentifierFirstRep().getSystem(), respCoverage.getIdentifierFirstRep().getSystem());
- assertEquals(theOriginalCoverage.getIdentifierFirstRep().getValue(), respCoverage.getIdentifierFirstRep().getValue());
- }
-
- private void validateConsentPatientAndPerformerRef(Patient thePatient, Consent theConsent) {
- String patientRef = thePatient.getIdElement().toUnqualifiedVersionless().getValue();
- assertEquals(patientRef, theConsent.getPatient().getReference());
- assertEquals(patientRef, theConsent.getPerformer().get(0).getReference());
- }
-
-
- private void validateMemberPatient(Parameters response) {
-// parameter MemberPatient must have a new identifier with:
-// {
-// "use": "usual",
-// "type": {
-// "coding": [
-// {
-// "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
-// "code": "UMB",
-// "display": "Member Number",
-// "userSelected": false
-// }
-// ],
-// "text": "Member Number"
-// },
-// "system": COVERAGE_PATIENT_IDENT_SYSTEM,
-// "value": COVERAGE_PATIENT_IDENT_VALUE
-// }
- List patientList = ParametersUtil.getNamedParameters(this.getFhirContext(), response, PARAM_MEMBER_PATIENT);
- assertEquals(1, patientList.size());
- Patient resultPatient = (Patient) response.getParameter().get(0).getResource();
-
- assertNotNull(resultPatient.getIdentifier());
- assertEquals(1, resultPatient.getIdentifier().size());
- Identifier addedIdentifier = resultPatient.getIdentifier().get(0);
- assertEquals(Identifier.IdentifierUse.USUAL, addedIdentifier.getUse());
- checkCoding(addedIdentifier.getType());
- assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_SYSTEM, addedIdentifier.getSystem());
- assertEquals(EXISTING_COVERAGE_PATIENT_IDENT_VALUE, addedIdentifier.getValue());
- }
-
-
- @Test
- public void testNoCoverageMatchFound() throws Exception {
- Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
- performOperationExpecting422(myServerBase + ourQuery, EncodingEnum.JSON, inputParameters,
- "Could not find coverage for member");
- }
-
- @Test
- public void testConsentUpdatePatientAndPerformer() throws Exception {
- createCoverageWithBeneficiary(true, true);
- Parameters inputParameters = buildInputParameters(myPatient, oldCoverage, newCoverage, myConsent);
- Parameters parametersResponse = performOperation(myServerBase + ourQuery,
- EncodingEnum.JSON, inputParameters);
-
- Consent respConsent = validateResponseConsent(parametersResponse, myConsent);
- validateConsentPatientAndPerformerRef(myPatient, respConsent);
- }
-
-
- private Parameters buildInputParameters(Patient thePatient, Coverage theOldCoverage, Coverage theNewCoverage, Consent theConsent) {
- Parameters p = new Parameters();
- ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_MEMBER_PATIENT, thePatient);
- ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_OLD_COVERAGE, theOldCoverage);
- ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_NEW_COVERAGE, theNewCoverage);
- ParametersUtil.addParameterToParameters(this.getFhirContext(), p, PARAM_CONSENT, theConsent);
- return p;
- }
-
-
- private Parameters performOperation(String theUrl,
- EncodingEnum theEncoding, Parameters theInputParameters) throws Exception {
-
- HttpPost post = new HttpPost(theUrl);
- post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString());
- post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters));
- ourLog.info("Request: {}", post);
- try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
- String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
- ourLog.info("Response: {}", responseString);
- assertEquals(200, response.getStatusLine().getStatusCode());
-
- return theEncoding.newParser(myFhirContext).parseResource(Parameters.class,
- responseString);
- }
- }
-
-
- private void performOperationExpecting422(String theUrl, EncodingEnum theEncoding,
- Parameters theInputParameters, String theExpectedErrorMsg) throws Exception {
-
- HttpPost post = new HttpPost(theUrl);
- post.addHeader(Constants.HEADER_ACCEPT_ENCODING, theEncoding.toString());
- post.setEntity(new ResourceEntity(this.getFhirContext(), theInputParameters));
- ourLog.info("Request: {}", post);
- try (CloseableHttpResponse response = ourHttpClient.execute(post)) {
- String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
- ourLog.info("Response: {}", responseString);
- assertEquals(422, response.getStatusLine().getStatusCode());
- assertThat(responseString, containsString(theExpectedErrorMsg));
- }
- }
-
-
- private void checkCoding(CodeableConcept theType) {
- // must match:
- // "coding": [
- // {
- // "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
- // "code": "MB",
- // "display": "Member Number",
- // "userSelected": false
- // }
- // * ]
- Coding coding = theType.getCoding().get(0);
- assertEquals("http://terminology.hl7.org/CodeSystem/v2-0203", coding.getSystem());
- assertEquals("MB", coding.getCode());
- assertEquals("Member Number", coding.getDisplay());
- assertFalse(coding.getUserSelected());
- }
-
- /**
- * Validates that consent from the response is same as the received consent with additional identifier and extension
- */
- private Consent validateResponseConsent(Parameters theResponse, Consent theOriginalConsent) {
- List consentList = ParametersUtil.getNamedParameters(this.getFhirContext(), theResponse, PARAM_CONSENT);
- assertEquals(1, consentList.size());
- Consent respConsent= (Consent) theResponse.getParameter().get(2).getResource();
-
- assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getSystem(), respConsent.getScope().getCodingFirstRep().getSystem());
- assertEquals(theOriginalConsent.getScope().getCodingFirstRep().getCode(), respConsent.getScope().getCodingFirstRep().getCode());
- assertEquals(myMemberMatcherR4Helper.CONSENT_IDENTIFIER_CODE_SYSTEM, respConsent.getIdentifier().get(0).getSystem());
- assertNotNull(respConsent.getIdentifier().get(0).getValue());
- return respConsent;
- }
-
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java
index c31db7196e3..a02443b3f09 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/RemoteTerminologyServiceResourceProviderR4Test.java
@@ -8,6 +8,7 @@ import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
+import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
@@ -43,7 +44,7 @@ public class RemoteTerminologyServiceResourceProviderR4Test {
private static final String CODE = "CODE";
private static final String VALUE_SET_URL = "http://value.set/url";
private static final String SAMPLE_MESSAGE = "This is a sample message";
- private static FhirContext ourCtx = FhirContext.forR4();
+ private static final FhirContext ourCtx = FhirContext.forR4Cached();
private MyCodeSystemProvider myCodeSystemProvider = new MyCodeSystemProvider();
private MyValueSetProvider myValueSetProvider = new MyValueSetProvider();
@@ -57,6 +58,7 @@ public class RemoteTerminologyServiceResourceProviderR4Test {
public void before_ConfigureService() {
String myBaseUrl = "http://localhost:" + myRestfulServerExtension.getPort();
mySvc = new RemoteTerminologyServiceValidationSupport(ourCtx, myBaseUrl);
+ mySvc.addClientInterceptor(new LoggingInterceptor(false).setLogRequestSummary(true).setLogResponseSummary(true));
}
@AfterEach
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java
index d77440aeef7..4dbbee087a6 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderHasParamR4Test.java
@@ -611,7 +611,7 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test {
searchAndReturnUnqualifiedVersionlessIdValues(uri);
- List<String> queries = myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).collect(Collectors.toList());
+ List<String> queries = myCaptureQueriesListener.getSelectQueries().stream().map(t -> t.getSql(true, false)).toList();
List<String> notInListQueries = new ArrayList<>();
for (String query : queries) {
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java
index b7e8e117bbc..2e17f8d2780 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderInterceptorR4Test.java
@@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.provider.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.searchparam.submit.interceptor.SearchParamValidatingInterceptor;
import ca.uhn.fhir.parser.IParser;
+import ca.uhn.fhir.rest.api.CacheControlDirective;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
@@ -238,7 +239,7 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes
@Test
public void testCreateReflexResourceTheHardWay() {
- ServerOperationInterceptorAdapter interceptor = new ReflexInterceptor();
+ ReflexInterceptor interceptor = new ReflexInterceptor();
myServer.getRestfulServer().registerInterceptor(interceptor);
try {
@@ -249,13 +250,16 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes
await()
.atMost(60, TimeUnit.SECONDS)
+ .pollInterval(1, TimeUnit.SECONDS)
.until(()->{
Bundle observations = myClient
.search()
.forResource("Observation")
.where(Observation.SUBJECT.hasId(pid))
.returnBundle(Bundle.class)
+ .cacheControl(CacheControlDirective.noCache())
.execute();
+ ourLog.info("Have {} observations", observations.getEntry().size());
return observations.getEntry().size();
},
equalTo(1));
@@ -558,22 +562,27 @@ public class ResourceProviderInterceptorR4Test extends BaseResourceProviderR4Tes
}
- public class ReflexInterceptor extends ServerOperationInterceptorAdapter {
- @Override
+ @Interceptor
+ public class ReflexInterceptor {
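+ // Test interceptor: remembers each created Patient on the request, then creates an Observation referencing it while the response is being returned.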
+
+ @Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)
public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
+ ourLog.info("resourceCreated with {}", theResource);
if (theResource instanceof Patient) {
((ServletRequestDetails) theRequest).getServletRequest().setAttribute("CREATED_PATIENT", theResource);
}
}
- @Override
+ @Hook(Pointcut.SERVER_OUTGOING_RESPONSE)
public void processingCompletedNormally(ServletRequestDetails theRequestDetails) {
Patient createdPatient = (Patient) theRequestDetails.getServletRequest().getAttribute("CREATED_PATIENT");
+ ourLog.info("processingCompletedNormally with {}", createdPatient);
if (createdPatient != null) {
Observation observation = new Observation();
observation.setSubject(new Reference(createdPatient.getId()));
- myClient.create().resource(observation).execute();
+ IIdType id = myClient.create().resource(observation).execute().getId();
+ ourLog.info("Created Observation with ID: {}", id);
}
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java
index 67dc5a72e85..a71da858d22 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetHSearchDisabledTest.java
@@ -5,28 +5,19 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.provider.ValueSetOperationProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
-import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
-import ca.uhn.fhir.parser.StrictErrorHandler;
import ca.uhn.fhir.rest.api.EncodingEnum;
-import ca.uhn.fhir.rest.client.api.IGenericClient;
-import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
-import ca.uhn.fhir.rest.server.RestfulServer;
-import ca.uhn.fhir.rest.server.interceptor.CorsInterceptor;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
-import ca.uhn.fhir.test.utilities.JettyUtil;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
+import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.ValueSet;
-import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
@@ -36,16 +27,9 @@ import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
-import org.springframework.web.context.ContextLoader;
-import org.springframework.web.context.WebApplicationContext;
-import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
-import org.springframework.web.context.support.GenericWebApplicationContext;
-import org.springframework.web.cors.CorsConfiguration;
-import org.springframework.web.servlet.DispatcherServlet;
import javax.annotation.Nonnull;
import java.io.IOException;
-import java.util.Arrays;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
@@ -57,10 +41,6 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResourceProviderR4ValueSetHSearchDisabledTest.class);
- private static RestfulServer ourRestServer;
- private static String ourServerBase;
- private static Server ourServer;
- private static DatabaseBackedPagingProvider ourPagingProvider;
@Autowired
private FhirContext myFhirCtx;
@@ -81,7 +61,13 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
private IIdType myExtensionalCsId;
private IIdType myExtensionalVsId;
- private IGenericClient myClient;
+ @SuppressWarnings("JUnitMalformedDeclaration")
+ @RegisterExtension
+ private RestfulServerExtension myServer = new RestfulServerExtension(FhirContext.forR4Cached())
+ .withServer(t -> t.registerProviders(myResourceProviders.createProviders()))
+ .withServer(t -> t.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class)))
+ .withServer(t -> t.setDefaultResponseEncoding(EncodingEnum.XML))
+ .withServer(t -> t.setPagingProvider(myAppCtx.getBean(DatabaseBackedPagingProvider.class)));
private void loadAndPersistCodeSystemAndValueSet() throws IOException {
loadAndPersistCodeSystem();
@@ -101,7 +87,7 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId, null);
}
private void loadAndPersistValueSet() throws IOException {
@@ -117,7 +103,7 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
myExtensionalVsId = myValueSetDao.create(theValueSet, mySrd).getId().toUnqualifiedVersionless();
}
});
- myValueSetDao.readEntity(myExtensionalVsId, null).getId();
+ myValueSetDao.readEntity(myExtensionalVsId, null);
}
@Override
@@ -130,12 +116,12 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
return myTxManager;
}
-
@Test
public void testExpandById() throws Exception {
loadAndPersistCodeSystemAndValueSet();
- Parameters respParam = myClient
+ Parameters respParam = myServer
+ .getFhirClient()
.operation()
.onInstance(myExtensionalVsId)
.named("expand")
@@ -161,76 +147,5 @@ public class ResourceProviderR4ValueSetHSearchDisabledTest extends BaseJpaTest {
}
- @BeforeEach
- public void before() throws Exception {
- myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
- myFhirCtx.getRestfulClientFactory().setSocketTimeout(1200 * 1000);
- myFhirCtx.setParserErrorHandler(new StrictErrorHandler());
-
- if (ourServer == null) {
- ourRestServer = new RestfulServer(myFhirCtx);
- ourRestServer.registerProviders(myResourceProviders.createProviders());
- ourRestServer.setDefaultResponseEncoding(EncodingEnum.XML);
-
- ourPagingProvider = myAppCtx.getBean(DatabaseBackedPagingProvider.class);
-
- ourRestServer.registerProvider(myAppCtx.getBean(ValueSetOperationProvider.class));
-
- Server server = new Server(0);
-
- ServletContextHandler proxyHandler = new ServletContextHandler();
- proxyHandler.setContextPath("/");
-
- ServletHolder servletHolder = new ServletHolder();
- servletHolder.setServlet(ourRestServer);
- proxyHandler.addServlet(servletHolder, "/fhir/context/*");
-
- GenericWebApplicationContext webApplicationContext = new GenericWebApplicationContext();
- webApplicationContext.setParent(myAppCtx);
- webApplicationContext.refresh();
- proxyHandler.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, webApplicationContext);
-
- DispatcherServlet dispatcherServlet = new DispatcherServlet();
- // dispatcherServlet.setApplicationContext(webApplicationContext);
- dispatcherServlet.setContextClass(AnnotationConfigWebApplicationContext.class);
- ServletHolder subsServletHolder = new ServletHolder();
- subsServletHolder.setServlet(dispatcherServlet);
- subsServletHolder.setInitParameter(
- ContextLoader.CONFIG_LOCATION_PARAM,
- WebsocketDispatcherConfig.class.getName());
- proxyHandler.addServlet(subsServletHolder, "/*");
-
- // Register a CORS filter
- CorsConfiguration config = new CorsConfiguration();
- CorsInterceptor corsInterceptor = new CorsInterceptor(config);
- config.addAllowedHeader("x-fhir-starter");
- config.addAllowedHeader("Origin");
- config.addAllowedHeader("Accept");
- config.addAllowedHeader("X-Requested-With");
- config.addAllowedHeader("Content-Type");
- config.addAllowedHeader("Access-Control-Request-Method");
- config.addAllowedHeader("Access-Control-Request-Headers");
- config.addAllowedOrigin("*");
- config.addExposedHeader("Location");
- config.addExposedHeader("Content-Location");
- config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS"));
- ourRestServer.registerInterceptor(corsInterceptor);
-
- server.setHandler(proxyHandler);
- JettyUtil.startServer(server);
- int port = JettyUtil.getPortForStartedServer(server);
- ourServerBase = "http://localhost:" + port + "/fhir/context";
-
- myFhirCtx.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
- myFhirCtx.getRestfulClientFactory().setSocketTimeout(20000);
-
- ourServer = server;
- }
-
- ourRestServer.setPagingProvider(ourPagingProvider);
-
- myClient = myFhirCtx.newRestfulGenericClient(ourServerBase);
- }
-
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java
index 3c0b880b903..76e917ae3f4 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java
@@ -114,7 +114,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId, null);
}
private void loadAndPersistValueSet() throws IOException {
@@ -145,7 +145,7 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv
default:
throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb);
}
- myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId();
+ myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId();
}
private CodeSystem createExternalCs() {
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java
index 035a23222ff..2ce02edde6a 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java
@@ -106,7 +106,7 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid
myExtensionalCsId = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId, null);
}
private void loadAndPersistValueSet() throws IOException {
@@ -138,7 +138,7 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid
default:
throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb);
}
- myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId();
+ myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getIdDt().getIdPartAsLong();
}
private CodeSystem createExternalCs() {
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java
index ad75e811737..57f71d85b37 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java
@@ -106,7 +106,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider
myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v1, null);
theCodeSystem.setId("CodeSystem/cs2");
theCodeSystem.setVersion("2");
@@ -119,7 +119,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider
myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v2, null);
}
@@ -129,13 +129,13 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider
valueSet.setId("ValueSet/vs1");
valueSet.getCompose().getInclude().get(0).setVersion("1");
myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId();
+ myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getIdDt().getIdPartAsLong();
valueSet.setVersion("2");
valueSet.setId("ValueSet/vs2");
valueSet.getCompose().getInclude().get(0).setVersion("2");
myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId();
+ myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getIdDt().getIdPartAsLong();
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java
index 5a23c780ba4..fb4091a8279 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/reindex/ReindexStepTest.java
@@ -61,7 +61,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify
assertEquals(2, outcome.getRecordsProcessed());
- assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
+ assertEquals(6, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@@ -91,7 +91,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify
assertEquals(2, outcome.getRecordsProcessed());
- assertEquals(6, myCaptureQueriesListener.logSelectQueries().size());
+ assertEquals(8, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@@ -124,7 +124,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify
assertEquals(2, outcome.getRecordsProcessed());
- assertEquals(4, myCaptureQueriesListener.logSelectQueries().size());
+ assertEquals(6, myCaptureQueriesListener.logSelectQueries().size());
// name, family, phonetic, deceased, active
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countUpdateQueries());
@@ -192,7 +192,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify
assertEquals(2, outcome.getRecordsProcessed());
- assertEquals(8, myCaptureQueriesListener.logSelectQueries().size());
+ assertEquals(10, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(0, myCaptureQueriesListener.countInsertQueries());
assertEquals(4, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
@@ -237,7 +237,7 @@ public class ReindexStepTest extends BaseJpaR4Test {
// Verify
assertEquals(4, outcome.getRecordsProcessed());
- assertEquals(5, myCaptureQueriesListener.logSelectQueries().size());
+ assertEquals(9, myCaptureQueriesListener.logSelectQueries().size());
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(2, myCaptureQueriesListener.countUpdateQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java
new file mode 100644
index 00000000000..ca920975a30
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ReindexRaceBugTest.java
@@ -0,0 +1,192 @@
+package ca.uhn.fhir.jpa.search.reindex;
+
+import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
+import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
+import ca.uhn.fhir.storage.test.DaoTestDataBuilder;
+import ca.uhn.test.concurrency.LockstepEnumPhaser;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.SearchParameter;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.transaction.TransactionStatus;
+import org.springframework.transaction.annotation.Propagation;
+
+import javax.annotation.Nonnull;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
+import java.util.function.BiFunction;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
+@ContextConfiguration(classes = {
+ DaoTestDataBuilder.Config.class
+})
+class ReindexRaceBugTest extends BaseJpaR4Test {
+ private static final Logger ourLog = LoggerFactory.getLogger(ReindexRaceBugTest.class);
+ @Autowired
+ HapiTransactionService myHapiTransactionService;
+
+ enum Steps {
+ STARTING, RUN_REINDEX, RUN_DELETE, COMMIT_REINDEX, FINISHED
+ }
+
+ /**
+ * Simulate reindex job step overlapping with resource deletion.
+ * The $reindex step processes several resources in a single tx.
+ * The tested sequence here is: job step $reindexes a resource, then another thread DELETEs the resource,
+ * then later, the $reindex step finishes the rest of the resources and commits AFTER the DELETE commits.
+ *
+ * This was inserting new index rows into HFJ_SPIDX_TOKEN even though the resource was gone.
+ * This is an illegal state for our index. Deleted resources should never have content in HFJ_SPIDX_*
+ */
+ @Test
+ void deleteOverlapsWithReindex_leavesIndexRowsP() throws InterruptedException, ExecutionException {
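+ // The phaser keeps this test thread and the background reindex thread in lock-step through the Steps enum.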
+ LockstepEnumPhaser<Steps> phaser = new LockstepEnumPhaser<>(2, Steps.class);
+
+ ourLog.info("An observation is created");
+
+ var observationCreateOutcome = callInFreshTx((tx, rd) -> {
+ Observation o = (Observation) buildResource("Observation", withEffectiveDate("2021-01-01T00:00:00"));
+ return myObservationDao.create(o, rd);
+ });
+ IIdType observationId = observationCreateOutcome.getId().toVersionless();
+ long observationPid = Long.parseLong(observationCreateOutcome.getId().getIdPart());
+
+ assertEquals(1, getSPIDXDateCount(observationPid), "date index row for date");
+
+ ourLog.info("Then a SP is created after that matches data in the Observation");
+ SearchParameter sp = myFhirContext.newJsonParser().parseResource(SearchParameter.class, """
+ {
+ "resourceType": "SearchParameter",
+ "id": "observation-date2",
+ "status": "active",
+ "code": "date2",
+ "name": "date2",
+ "base": [ "Observation" ],
+ "type": "date",
+ "expression": "Observation.effective"
+ }
+ """);
+ this.myStorageSettings.setMarkResourcesForReindexingUponSearchParameterChange(false);
+ callInFreshTx((tx, rd) -> {
+ DaoMethodOutcome result = mySearchParameterDao.update(sp, rd);
+ mySearchParamRegistry.forceRefresh();
+ return result;
+ });
+
+ assertEquals(1, getSPIDXDateCount(observationPid), "still only one index row before reindex");
+
+
+ // suppose reindex job step starts here and loads the resource and ResourceTable entity
+ ThreadFactory loggingThreadFactory = getLoggingThreadFactory("Reindex-thread");
+ ExecutorService backgroundReindexThread = Executors.newSingleThreadExecutor(loggingThreadFactory);
+ Future<Integer> backgroundResult = backgroundReindexThread.submit(() -> {
+ try {
+ callInFreshTx((tx, rd) -> {
+ try {
+ ourLog.info("Starting background $reindex");
+ phaser.arriveAndAwaitSharedEndOf(Steps.STARTING);
+
+ phaser.assertInPhase(Steps.RUN_REINDEX);
+ ourLog.info("Run $reindex");
+ myObservationDao.reindex(JpaPid.fromIdAndResourceType(observationPid, "Observation"), rd, new TransactionDetails());
+
+ ourLog.info("$reindex done release main thread to delete");
+ phaser.arriveAndAwaitSharedEndOf(Steps.RUN_REINDEX);
+
+ ourLog.info("Wait for delete to finish");
+ phaser.arriveAndAwaitSharedEndOf(Steps.RUN_DELETE);
+
+ phaser.assertInPhase(Steps.COMMIT_REINDEX);
+ ourLog.info("Commit $reindex now that delete is finished");
+ // commit happens here at end of block
+ } catch (Exception e) {
+ ourLog.error("$reindex thread failed", e);
+ }
+ return 0;
+ });
+ } finally {
+ phaser.arriveAndAwaitSharedEndOf(Steps.COMMIT_REINDEX);
+ }
+ return 1;
+ });
+
+ ourLog.info("Wait for $reindex to start");
+ phaser.arriveAndAwaitSharedEndOf(Steps.STARTING);
+
+ phaser.arriveAndAwaitSharedEndOf(Steps.RUN_REINDEX);
+
+ // then the resource is deleted
+ phaser.assertInPhase(Steps.RUN_DELETE);
+
+ ourLog.info("Deleting observation");
+ callInFreshTx((tx, rd) -> myObservationDao.delete(observationId, rd));
+ assertResourceDeleted(observationId);
+ assertEquals(0, getSPIDXDateCount(observationPid), "A deleted resource should have 0 index rows");
+
+ ourLog.info("Let $reindex commit");
+ phaser.arriveAndAwaitSharedEndOf(Steps.RUN_DELETE);
+
+ // then the reindex call finishes
+ ourLog.info("Await $reindex commit");
+ phaser.arriveAndAwaitSharedEndOf(Steps.COMMIT_REINDEX);
+
+ assertEquals(0, getSPIDXDateCount(observationPid), "A deleted resource should still have 0 index rows, after $reindex completes");
+ }
+
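+ /** Thread factory that names the thread and logs an error if the task terminates without completing normally. */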
+ @Nonnull
+ static ThreadFactory getLoggingThreadFactory(String theThreadName) {
+ ThreadFactory loggingThreadFactory = r -> new Thread(() -> {
+ boolean success = false;
+ try {
+ r.run();
+ success = true;
+ } finally {
+ if (!success) {
+ ourLog.error("Background thread failed");
+ }
+ }
+ }, theThreadName);
+ return loggingThreadFactory;
+ }
+
+ void assertResourceDeleted(IIdType observationId) {
+ try {
+ // confirm deleted
+ callInFreshTx((tx, rd) ->
+ myObservationDao.read(observationId, rd, false));
+ fail("Read deleted resource");
+ } catch (ResourceGoneException e) {
+ // expected
+ }
+ }
+
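+ // Runs the callback in its own REQUIRES_NEW transaction so each step of the test commits independently of the other thread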
+ T callInFreshTx(BiFunction theCallback) {
+ SystemRequestDetails requestDetails = new SystemRequestDetails();
+ return myHapiTransactionService.withRequest(requestDetails)
+ .withTransactionDetails(new TransactionDetails())
+ .withPropagation(Propagation.REQUIRES_NEW)
+ .execute(tx -> theCallback.apply(tx, requestDetails));
+ }
+
+
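+ // Counts the date search-parameter index rows currently stored for the observation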
+ int getSPIDXDateCount(long observationPid) {
+ return callInFreshTx((tx, rd) ->
+ myResourceIndexedSearchParamDateDao.findAllForResourceId(observationPid).size());
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
index 63587c86375..4171540f847 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImplTest.java
@@ -1,6 +1,7 @@
package ca.uhn.fhir.jpa.search.reindex;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
@@ -10,7 +11,6 @@ import ca.uhn.fhir.jpa.dao.data.IResourceReindexJobDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.entity.ResourceReindexJobEntity;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
-import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
@@ -51,7 +51,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
-public class ResourceReindexingSvcImplTest extends BaseJpaTest {
+public class ResourceReindexingSvcImplTest {
private static final FhirContext ourFhirContext = FhirContext.forR4Cached();
@@ -87,21 +87,10 @@ public class ResourceReindexingSvcImplTest extends BaseJpaTest {
private final ResourceReindexer myResourceReindexer = new ResourceReindexer(ourFhirContext);
@InjectMocks
private final ResourceReindexingSvcImpl mySvc = new ResourceReindexingSvcImpl();
+ private JpaStorageSettings myStorageSettings = new JpaStorageSettings();
- @Override
- public FhirContext getFhirContext() {
- return ourFhirContext;
- }
-
- @Override
- protected PlatformTransactionManager getTxManager() {
- return myTxManager;
- }
-
- @Override
@BeforeEach
public void before() throws Exception {
- super.before();
myStorageSettings.setReindexThreadCount(2);
mySvc.setContextForUnitTest(ourFhirContext);
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
index 68499535fbf..6c6b348e475 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/stresstest/GiantTransactionPerfTest.java
@@ -26,8 +26,8 @@ import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
import ca.uhn.fhir.jpa.dao.r4.FhirSystemDaoR4;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
-import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
@@ -76,6 +76,7 @@ import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.SimpleTransactionStatus;
+import org.springframework.transaction.support.TransactionSynchronizationManager;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@@ -265,6 +266,7 @@ public class GiantTransactionPerfTest {
myEobDao.setIdHelperSvcForUnitTest(myIdHelperService);
myEobDao.setPartitionSettingsForUnitTest(myPartitionSettings);
myEobDao.setJpaStorageResourceParserForUnitTest(myJpaStorageResourceParser);
+ myEobDao.setExternallyStoredResourceServiceRegistryForUnitTest(new ExternallyStoredResourceServiceRegistry());
myEobDao.start();
myDaoRegistry.setResourceDaos(Lists.newArrayList(myEobDao));
@@ -869,7 +871,7 @@ public class GiantTransactionPerfTest {
private static class MockRequestPartitionHelperSvc implements ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc {
@Nonnull
@Override
- public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, @Nonnull ReadPartitionIdRequestDetails theDetails) {
+ public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull ReadPartitionIdRequestDetails theDetails) {
return RequestPartitionId.defaultPartition();
}
@@ -884,12 +886,6 @@ public class GiantTransactionPerfTest {
return RequestPartitionId.defaultPartition();
}
- @Override
- @Nonnull
- public PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId) {
- return new PartitionablePartitionId(theRequestPartitionId.getFirstPartitionIdOrNull(), theRequestPartitionId.getPartitionDate());
- }
-
@Nonnull
@Override
public Set toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId) {
@@ -911,17 +907,18 @@ public class GiantTransactionPerfTest {
@Nonnull
@Override
public TransactionStatus getTransaction(TransactionDefinition definition) throws TransactionException {
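+ // Mark a transaction as active so code consulting TransactionSynchronizationManager behaves as it would inside a real transaction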
+ TransactionSynchronizationManager.setActualTransactionActive(true);
return new SimpleTransactionStatus();
}
@Override
public void commit(@Nonnull TransactionStatus status) throws TransactionException {
-
+ TransactionSynchronizationManager.setActualTransactionActive(false);
}
@Override
public void rollback(@Nonnull TransactionStatus status) throws TransactionException {
-
+ TransactionSynchronizationManager.setActualTransactionActive(false);
}
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java
index 3c31636f9f3..df71e873216 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/subscription/message/MessageSubscriptionR4Test.java
@@ -1,6 +1,8 @@
package ca.uhn.fhir.jpa.subscription.message;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
+import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
+import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
import ca.uhn.fhir.jpa.subscription.BaseSubscriptionsR4Test;
import ca.uhn.fhir.jpa.subscription.channel.api.ChannelConsumerSettings;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelReceiver;
@@ -8,11 +10,11 @@ import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFact
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedJsonMessage;
import ca.uhn.fhir.jpa.subscription.model.ResourceModifiedMessage;
import ca.uhn.fhir.jpa.test.util.StoppableSubscriptionDeliveringRestHookSubscriber;
-import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
-import org.hl7.fhir.r4.model.CodeableConcept;
+import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Observation;
+import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Subscription;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -20,18 +22,19 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
-import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.List;
+import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.notNullValue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Test the rest-hook subscriptions
@@ -50,7 +53,9 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test {
myStoppableSubscriptionDeliveringRestHookSubscriber.setCountDownLatch(null);
myStoppableSubscriptionDeliveringRestHookSubscriber.unPause();
myStorageSettings.setTriggerSubscriptionsForNonVersioningChanges(new JpaStorageSettings().isTriggerSubscriptionsForNonVersioningChanges());
+ myStorageSettings.setTagStorageMode(new JpaStorageSettings().getTagStorageMode());
}
+
@BeforeEach
public void beforeRegisterRestHookListener() {
mySubscriptionTestUtil.registerMessageInterceptor();
@@ -60,11 +65,11 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test {
receiver.subscribe(handler);
}
- private Subscription createObservationSubscription() {
+ private Subscription createSubscriptionWithCriteria(String theCriteria) {
Subscription subscription = new Subscription();
subscription.setReason("Monitor new neonatal function (note, age will be determined by the monitor)");
subscription.setStatus(Subscription.SubscriptionStatus.REQUESTED);
- subscription.setCriteria("[Observation]");
+ subscription.setCriteria(theCriteria);
Subscription.SubscriptionChannelComponent channel = subscription.getChannel();
channel.setType(Subscription.SubscriptionChannelType.MESSAGE);
@@ -92,7 +97,7 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test {
@MethodSource("sourceTypes")
public void testCreateUpdateAndPatchRetainCorrectSourceThroughDelivery(JpaStorageSettings.StoreMetaSourceInformationEnum theStorageStyle, String theExplicitSource, String theRequestId, String theExpectedSourceValue) throws Exception {
myStorageSettings.setStoreMetaSourceInformation(theStorageStyle);
- createObservationSubscription();
+ createSubscriptionWithCriteria("[Observation]");
waitForActivatedSubscriptionCount(1);
@@ -106,19 +111,64 @@ public class MessageSubscriptionR4Test extends BaseSubscriptionsR4Test {
waitForQueueToDrain();
//Should receive at our queue receiver
- Observation receivedObs = fetchSingleObservationFromSubscriptionTerminalEndpoint();
+ IBaseResource resource = fetchSingleResourceFromSubscriptionTerminalEndpoint();
+ assertThat(resource, instanceOf(Observation.class));
+ Observation receivedObs = (Observation) resource;
assertThat(receivedObs.getMeta().getSource(), is(equalTo(theExpectedSourceValue)));
}
- private Observation fetchSingleObservationFromSubscriptionTerminalEndpoint() {
+ @Test
+ public void testUpdateResourceRetainCorrectMetaTagsThroughDelivery() throws Exception {
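+ // With NON_VERSIONED tag storage, tags accumulate on the resource across updates, so the update below should deliver all three tags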
+ myStorageSettings.setTagStorageMode(JpaStorageSettings.TagStorageModeEnum.NON_VERSIONED);
+ createSubscriptionWithCriteria("[Patient]");
+
+ waitForActivatedSubscriptionCount(1);
+
+ // Create Patient with two meta tags
+ Patient patient = new Patient();
+ patient.setActive(true);
+ patient.getMeta().addTag().setSystem("http://www.example.com/tags").setCode("tag-1");
+ patient.getMeta().addTag().setSystem("http://www.example.com/tags").setCode("tag-2");
+
+ IIdType id = myClient.create().resource(patient).execute().getId();
+
+ // Should see 1 subscription notification for CREATE
+ waitForQueueToDrain();
+
+ // Should receive two meta tags
+ IBaseResource resource = fetchSingleResourceFromSubscriptionTerminalEndpoint();
+ assertThat(resource, instanceOf(Patient.class));
+ Patient receivedPatient = (Patient) resource;
+ assertThat(receivedPatient.getMeta().getTag().size(), is(equalTo(2)));
+
+ // Update the previous Patient and add one more tag
+ patient = new Patient();
+ patient.setId(id);
+ patient.setActive(true);
+ patient.getMeta().getTag().add(new Coding().setSystem("http://www.example.com/tags").setCode("tag-3"));
+ myClient.update().resource(patient).execute();
+
+ waitForQueueToDrain();
+
+ // Should receive all three meta tags
+ List expected = List.of("tag-1", "tag-2", "tag-3");
+ resource = fetchSingleResourceFromSubscriptionTerminalEndpoint();
+ receivedPatient = (Patient) resource;
+ List receivedTagList = receivedPatient.getMeta().getTag();
+ ourLog.info(getFhirContext().newJsonParser().setPrettyPrint(true).encodeResourceToString(receivedPatient));
+ assertThat(receivedTagList.size(), is(equalTo(3)));
+ List actual = receivedTagList.stream().map(t -> t.getCode()).sorted().collect(Collectors.toList());
+ assertTrue(expected.equals(actual));
+ }
+
+ private IBaseResource fetchSingleResourceFromSubscriptionTerminalEndpoint() {
assertThat(handler.getMessages().size(), is(equalTo(1)));
ResourceModifiedJsonMessage resourceModifiedJsonMessage = handler.getMessages().get(0);
ResourceModifiedMessage payload = resourceModifiedJsonMessage.getPayload();
String payloadString = payload.getPayloadString();
IBaseResource resource = myFhirContext.newJsonParser().parseResource(payloadString);
- Observation receivedObs = (Observation) resource;
handler.clearMessages();
- return receivedObs;
+ return resource;
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java
index 8fceada16bb..9b0f3063a8f 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/BaseTermR4Test.java
@@ -27,8 +27,10 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test {
Long myExtensionalCsIdOnResourceTable;
Long myExtensionalVsIdOnResourceTable;
+ @Override
@BeforeEach
- public void before() {
+ public void before() throws Exception {
+ super.before();
myStorageSettings.setAllowExternalReferences(true);
}
@@ -143,7 +145,7 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test {
default:
throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb);
}
- myExtensionalCsIdOnResourceTable = myCodeSystemDao.readEntity(myExtensionalCsId, null).getId();
+ myExtensionalCsIdOnResourceTable = (Long) myCodeSystemDao.readEntity(myExtensionalCsId, null).getPersistentId().getId();
}
void loadAndPersistValueSet(HttpVerb theVerb) throws IOException {
@@ -180,7 +182,7 @@ public abstract class BaseTermR4Test extends BaseJpaR4Test {
default:
throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb);
}
- myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId();
+ myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId();
}
}
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
index e4f8c3a23b4..2edf0f57ee5 100644
--- a/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
+++ b/hapi-fhir-jpaserver-test-r4/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java
@@ -53,6 +53,8 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {
ConceptValidationOptions optsNoGuess = new ConceptValidationOptions();
ConceptValidationOptions optsGuess = new ConceptValidationOptions().setInferSystem(true);
+ @Autowired
+ private Batch2JobHelper myBatchJobHelper;
@Override
@AfterEach
@@ -479,6 +481,7 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test {
await().until(() -> {
myBatch2JobHelper.runMaintenancePass();
myTerminologyDeferredStorageSvc.saveAllDeferred();
+ myBatchJobHelper.awaitAllJobsOfJobDefinitionIdToComplete(TERM_CODE_SYSTEM_VERSION_DELETE_JOB_NAME);
return myTerminologyDeferredStorageSvc.isStorageQueueEmpty(true);
}, equalTo(true));
diff --git a/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml
new file mode 100644
index 00000000000..dcada7fec76
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4/src/test/resources/logback-test.xml
@@ -0,0 +1,46 @@
+
+
+
+ %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-test-r4b/pom.xml b/hapi-fhir-jpaserver-test-r4b/pom.xml
index ac59ecbb484..ecc32818129 100644
--- a/hapi-fhir-jpaserver-test-r4b/pom.xml
+++ b/hapi-fhir-jpaserver-test-r4b/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml
new file mode 100644
index 00000000000..dcada7fec76
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r4b/src/test/resources/logback-test.xml
@@ -0,0 +1,46 @@
+
+
+
+ %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-test-r5/pom.xml b/hapi-fhir-jpaserver-test-r5/pom.xml
index ed50c6255da..38acf3ffde3 100644
--- a/hapi-fhir-jpaserver-test-r5/pom.xml
+++ b/hapi-fhir-jpaserver-test-r5/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
index ec718d86496..bbf7b0ece51 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java
@@ -45,6 +45,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.provider.JpaSystemProvider;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.IStaleSearchDeletingSvc;
@@ -140,6 +141,8 @@ import static org.mockito.Mockito.mock;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {TestR5Config.class})
public abstract class BaseJpaR5Test extends BaseJpaTest implements ITestDataBuilder {
+ @Autowired
+ protected PartitionSettings myPartitionSettings;
@Autowired
protected ITermCodeSystemStorageSvc myTermCodeSystemStorageSvc;
@Autowired
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java
new file mode 100644
index 00000000000..e5aea038ccf
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/CrossPartitionReferencesTest.java
@@ -0,0 +1,247 @@
+package ca.uhn.fhir.jpa.dao.r5;
+
+import ca.uhn.fhir.interceptor.api.Hook;
+import ca.uhn.fhir.interceptor.api.Interceptor;
+import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
+import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
+import ca.uhn.fhir.jpa.model.dao.JpaPid;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails;
+import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
+import ca.uhn.fhir.jpa.util.JpaHapiTransactionService;
+import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.param.TokenParam;
+import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r5.model.Enumerations;
+import org.hl7.fhir.r5.model.Observation;
+import org.hl7.fhir.r5.model.Patient;
+import org.hl7.fhir.r5.model.Reference;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Captor;
+import org.mockito.Mock;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.transaction.annotation.Propagation;
+
+import javax.annotation.Nonnull;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.matchesPattern;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class CrossPartitionReferencesTest extends BaseJpaR5Test {
+
+ public static final RequestPartitionId PARTITION_PATIENT = RequestPartitionId.fromPartitionId(1);
+ public static final RequestPartitionId PARTITION_OBSERVATION = RequestPartitionId.fromPartitionId(2);
+ @Autowired
+ private JpaHapiTransactionService myTransactionSvc;
+ @Autowired
+ private IRequestPartitionHelperSvc myPartitionHelperSvc;
+ @Autowired
+ private IHapiTransactionService myTransactionService;
+ @Autowired
+ private IResourceLinkResolver myResourceLinkResolver;
+ @Mock
+ private ICrossPartitionReferenceDetectedHandler myCrossPartitionReferencesDetectedInterceptor;
+ @Captor
+ private ArgumentCaptor myCrossPartitionReferenceDetailsCaptor;
+
+ @Override
+ @BeforeEach
+ public void before() throws Exception {
+ super.before();
+
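+ // Enable unnamed partitioning, allow unqualified references across partitions, and open a new transaction whenever the partition changes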
+ myPartitionSettings.setPartitioningEnabled(true);
+ myPartitionSettings.setAllowReferencesAcrossPartitions(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
+ myPartitionSettings.setUnnamedPartitionMode(true);
+ myPartitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(true);
+
+ myInterceptorRegistry.registerInterceptor(new MyPartitionSelectorInterceptor());
+ myInterceptorRegistry.registerInterceptor(myCrossPartitionReferencesDetectedInterceptor);
+
+ myTransactionSvc.setTransactionPropagationWhenChangingPartitions(Propagation.REQUIRES_NEW);
+ }
+
+ @AfterEach
+ public void after() {
+ myPartitionSettings.setPartitioningEnabled(new PartitionSettings().isPartitioningEnabled());
+ myPartitionSettings.setAllowReferencesAcrossPartitions(new PartitionSettings().getAllowReferencesAcrossPartitions());
+ myPartitionSettings.setUnnamedPartitionMode(new PartitionSettings().isUnnamedPartitionMode());
+ myPartitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(new PartitionSettings().isAlwaysOpenNewTransactionForDifferentPartition());
+
+ myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof MyPartitionSelectorInterceptor);
+ myInterceptorRegistry.unregisterInterceptorsIf(t -> t instanceof ICrossPartitionReferenceDetectedHandler);
+
+ myTransactionSvc.setTransactionPropagationWhenChangingPartitions(Propagation.REQUIRED);
+ }
+
+ @Test
+ public void testSamePartitionReference_Create() {
+ // Setup
+ Patient p1 = new Patient();
+ p1.setActive(true);
+ IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless();
+
+ initializeCrossReferencesInterceptor();
+
+ myCaptureQueriesListener.clear();
+ Patient p2 = new Patient();
+ p2.setActive(true);
+ p2.addLink().setOther(new Reference(patient1Id));
+
+ // Test
+ myCaptureQueriesListener.clear();
+ IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless();
+
+ // Verify
+ myCaptureQueriesListener.logSelectQueries();
+ assertEquals(1, myCaptureQueriesListener.countCommits());
+ assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+ SearchParameterMap params = SearchParameterMap
+ .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue()))
+ .addInclude(Patient.INCLUDE_LINK);
+ IBundleProvider search = myPatientDao.search(params, mySrd);
+ assertThat(toUnqualifiedVersionlessIdValues(search), contains(patient2Id.getValue(), patient1Id.getValue()));
+ assertEquals(2, search.getAllResources().size());
+ search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive()));
+ }
+
+ @Test
+ public void testSamePartitionReference_SearchWithInclude() {
+ // Setup
+ Patient p1 = new Patient();
+ p1.setActive(true);
+ IIdType patient1Id = myPatientDao.create(p1, mySrd).getId().toUnqualifiedVersionless();
+
+ initializeCrossReferencesInterceptor();
+
+ myCaptureQueriesListener.clear();
+ Patient p2 = new Patient();
+ p2.setActive(true);
+ p2.addLink().setOther(new Reference(patient1Id));
+ IIdType patient2Id = myPatientDao.create(p2, mySrd).getId().toUnqualifiedVersionless();
+
+ // Test
+ SearchParameterMap params = SearchParameterMap
+ .newSynchronous(Constants.PARAM_ID, new TokenParam(patient2Id.getValue()))
+ .addInclude(Patient.INCLUDE_LINK);
+ IBundleProvider search = myPatientDao.search(params, mySrd);
+
+ // Verify
+ assertThat(toUnqualifiedVersionlessIdValues(search), contains(patient2Id.getValue(), patient1Id.getValue()));
+ assertEquals(2, search.getAllResources().size());
+ search.getAllResources().forEach(p -> assertTrue(((Patient) p).getActive()));
+ }
+
+ @Test
+ public void testCrossPartitionReference_Create() {
+
+ // Setup
+ Patient p = new Patient();
+ p.setActive(true);
+ IIdType patientId = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
+ ourLog.info("Patient ID: {}", patientId);
+ runInTransaction(() -> {
+ ResourceTable resourceTable = myResourceTableDao.findById(patientId.getIdPartAsLong()).orElseThrow();
+ assertEquals(1, resourceTable.getPartitionId().getPartitionId());
+ });
+
+ initializeCrossReferencesInterceptor();
+
+ // Test
+
+ Observation o = new Observation();
+ o.setStatus(Enumerations.ObservationStatus.FINAL);
+ o.setSubject(new Reference(patientId));
+ myCaptureQueriesListener.clear();
+ IIdType observationId = myObservationDao.create(o, mySrd).getId().toUnqualifiedVersionless();
+
+ // Verify
+ assertEquals(2, myCaptureQueriesListener.countCommits());
+ assertEquals(0, myCaptureQueriesListener.countRollbacks());
+
+ runInTransaction(() -> {
+ ResourceTable resourceTable = myResourceTableDao.findById(observationId.getIdPartAsLong()).orElseThrow();
+ assertEquals(2, resourceTable.getPartitionId().getPartitionId());
+ });
+
+ verify(myCrossPartitionReferencesDetectedInterceptor, times(1)).handle(eq(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE), myCrossPartitionReferenceDetailsCaptor.capture());
+ CrossPartitionReferenceDetails referenceDetails = myCrossPartitionReferenceDetailsCaptor.getValue();
+ assertEquals(PARTITION_OBSERVATION, referenceDetails.getSourceResourcePartitionId());
+ assertEquals(patientId.getValue(), referenceDetails.getPathAndRef().getRef().getReferenceElement().getValue());
+ }
+
+ private void initializeCrossReferencesInterceptor() {
+ when(myCrossPartitionReferencesDetectedInterceptor.handle(any(), any())).thenAnswer(t -> {
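+ // Resolve the reference target in its own partition: determine the target partition for the reference,
+ // then look the target up inside a transaction bound to that partition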
+ CrossPartitionReferenceDetails theDetails = t.getArgument(1, CrossPartitionReferenceDetails.class);
+ IIdType targetId = theDetails.getPathAndRef().getRef().getReferenceElement();
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(targetId);
+ RequestPartitionId referenceTargetPartition = myPartitionHelperSvc.determineReadPartitionForRequest(theDetails.getRequestDetails(), details);
+
+ IResourceLookup targetResource = myTransactionService
+ .withRequest(theDetails.getRequestDetails())
+ .withTransactionDetails(theDetails.getTransactionDetails())
+ .withRequestPartitionId(referenceTargetPartition)
+ .execute(() -> myResourceLinkResolver.findTargetResource(referenceTargetPartition, theDetails.getSourceResourceName(), theDetails.getPathAndRef(), theDetails.getRequestDetails(), theDetails.getTransactionDetails()));
+
+ return targetResource;
+ });
+ }
+
+ @Interceptor
+ public interface ICrossPartitionReferenceDetectedHandler {
+
+ @Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
+ IResourceLookup handle(Pointcut thePointcut, CrossPartitionReferenceDetails theDetails);
+
+ }
+
+
+ public class MyPartitionSelectorInterceptor {
+
+ @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
+ public RequestPartitionId selectPartitionCreate(IBaseResource theResource) {
+ String resourceType = myFhirContext.getResourceType(theResource);
+ return selectPartition(resourceType);
+ }
+
+ @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
+ public RequestPartitionId selectPartitionRead(ReadPartitionIdRequestDetails theReadPartitionIdRequestDetails) {
+ String resourceType = theReadPartitionIdRequestDetails.getResourceType();
+ return selectPartition(resourceType);
+ }
+
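+ // Patients live in partition 1 and Observations in partition 2, so an Observation's subject reference crosses partitions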
+ @Nonnull
+ private static RequestPartitionId selectPartition(String resourceType) {
+ switch (resourceType) {
+ case "Patient":
+ return PARTITION_PATIENT;
+ case "Observation":
+ return PARTITION_OBSERVATION;
+ default:
+ throw new InternalErrorException("Don't know how to handle resource type: " + resourceType);
+ }
+ }
+
+ }
+
+}
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java
new file mode 100644
index 00000000000..173f1899a33
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/dao/r5/ExternallyStoredResourceR5Test.java
@@ -0,0 +1,86 @@
+package ca.uhn.fhir.jpa.dao.r5;
+
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddress;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddressMetadataKey;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
+import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
+import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
+import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r5.model.Patient;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+
+public class ExternallyStoredResourceR5Test extends BaseJpaR5Test {
+
+ public static final String MY_PROVIDER_ID = "my-provider-id";
+ public static final String ADDRESS_123 = "address_123";
+ @Autowired
+ private ExternallyStoredResourceServiceRegistry myProviderRegistry;
+
+ @Mock
+ private IExternallyStoredResourceService myProvider;
+
+ @BeforeEach
+ public void beforeEach() {
+ when(myProvider.getId()).thenReturn(MY_PROVIDER_ID);
+ myProviderRegistry.registerProvider(myProvider);
+ }
+
+ @AfterEach
+ public void afterEach() {
+ myProviderRegistry.clearProviders();
+ }
+
+ @Test
+ public void testCreate() {
+ // Test
+ IIdType id = storePatientWithExternalAddress();
+
+ // Verify
+ runInTransaction(() -> {
+ ResourceTable resource = myResourceTableDao.getReferenceById(id.getIdPartAsLong());
+ assertNotNull(resource);
+ ResourceHistoryTable history = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1L);
+ assertNotNull(history);
+ assertEquals(ResourceEncodingEnum.ESR, history.getEncoding());
+ assertEquals(MY_PROVIDER_ID + ":" + ADDRESS_123, history.getResourceTextVc());
+ });
+
+ }
+
+
+ @Test
+ public void testRead() {
+ // Setup
+ IIdType id = storePatientWithExternalAddress();
+ Patient patient = new Patient();
+ patient.setActive(true);
+ when(myProvider.fetchResource(any())).thenReturn(patient);
+
+ // Test
+ Patient fetchedPatient = myPatientDao.read(id, mySrd);
+ assertTrue(fetchedPatient.getActive());
+
+ }
+
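+ // Attach an external storage address so the resource body is persisted as an ESR pointer (provider id + address) rather than inline content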
+ private IIdType storePatientWithExternalAddress() {
+ Patient p = new Patient();
+ ExternallyStoredResourceAddress address = new ExternallyStoredResourceAddress(MY_PROVIDER_ID, ADDRESS_123);
+ ExternallyStoredResourceAddressMetadataKey.INSTANCE.put(p, address);
+ IIdType id = myPatientDao.create(p, mySrd).getId().toUnqualifiedVersionless();
+ return id;
+ }
+
+
+}
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java
index ac43734dcc6..464e597f563 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java
@@ -152,7 +152,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test {
default:
throw new IllegalArgumentException("HTTP verb is not supported: " + theVerb);
}
- myExtensionalVsIdOnResourceTable = myValueSetDao.readEntity(myExtensionalVsId, null).getId();
+ myExtensionalVsIdOnResourceTable = (Long) myValueSetDao.readEntity(myExtensionalVsId, null).getPersistentId().getId();
}
private CodeSystem createExternalCs() {
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java
index 72427a4d46b..b99f702ce22 100644
--- a/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java
+++ b/hapi-fhir-jpaserver-test-r5/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java
@@ -102,7 +102,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v1, null);
theCodeSystem.setId("CodeSystem/cs2");
theCodeSystem.setVersion("2");
@@ -115,7 +115,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
- myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId();
+ myCodeSystemDao.readEntity(myExtensionalCsId_v2, null);
}
@@ -125,13 +125,13 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
valueSet.setId("ValueSet/vs1");
valueSet.getCompose().getInclude().get(0).setVersion("1");
myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId();
+ myExtensionalVsIdOnResourceTable_v1 = (Long) myValueSetDao.readEntity(myExtensionalVsId_v1, null).getPersistentId().getId();
valueSet.setVersion("2");
valueSet.setId("ValueSet/vs2");
valueSet.getCompose().getInclude().get(0).setVersion("2");
myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST);
- myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId();
+ myExtensionalVsIdOnResourceTable_v2 = (Long) myValueSetDao.readEntity(myExtensionalVsId_v2, null).getPersistentId().getId();
}
diff --git a/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml
new file mode 100644
index 00000000000..dcada7fec76
--- /dev/null
+++ b/hapi-fhir-jpaserver-test-r5/src/test/resources/logback-test.xml
@@ -0,0 +1,46 @@
+
+
+
+ %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hapi-fhir-jpaserver-test-utilities/pom.xml b/hapi-fhir-jpaserver-test-utilities/pom.xml
index 7c954543cce..46a987dda60 100644
--- a/hapi-fhir-jpaserver-test-utilities/pom.xml
+++ b/hapi-fhir-jpaserver-test-utilities/pom.xml
@@ -6,7 +6,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java
index a351e7f4dea..2eaf9a4c56f 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java
@@ -115,8 +115,11 @@ public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test {
ourRestServer.getInterceptorService().unregisterAllInterceptors();
}
+ @Override
@BeforeEach
public void before() throws Exception {
+ super.before();
+
myFhirContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
myFhirContext.getRestfulClientFactory().setSocketTimeout(1200 * 1000);
myFhirContext.setParserErrorHandler(new StrictErrorHandler());
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
index 717fac3d358..5a8567dad00 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaR4Test.java
@@ -103,6 +103,7 @@ import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.BasePagingProvider;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
+import ca.uhn.fhir.storage.test.DaoTestDataBuilder;
import ca.uhn.fhir.test.utilities.ITestDataBuilder;
import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java
index 1d51a1709d6..54002cf35b7 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/BaseJpaTest.java
@@ -73,6 +73,7 @@ import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -778,7 +779,7 @@ public abstract class BaseJpaTest extends BaseTest {
for (int count = 0; ; count++) {
try {
- theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), null);
+ theSystemDao.expunge(new ExpungeOptions().setExpungeEverything(true), new SystemRequestDetails());
break;
} catch (Exception e) {
if (count >= 3) {
diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
index fb32c171f0d..a01fc128d70 100644
--- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
+++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/test/config/TestR5Config.java
@@ -158,6 +158,7 @@ public class TestR5Config {
.afterQuery(captureQueriesListener())
.afterQuery(new CurrentThreadCaptureQueriesListener())
.countQuery(singleQueryCountHolder())
+ .afterMethod(captureQueriesListener())
.build();
return dataSource;
diff --git a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
index a1af87d765d..f927e80ba6f 100644
--- a/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
+++ b/hapi-fhir-jpaserver-uhnfhirtest/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-server-mdm/pom.xml b/hapi-fhir-server-mdm/pom.xml
index 94fb2076ff4..b6c300cad9a 100644
--- a/hapi-fhir-server-mdm/pom.xml
+++ b/hapi-fhir-server-mdm/pom.xml
@@ -7,7 +7,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java
index 6ba5cbafcce..1176557e0e1 100644
--- a/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java
+++ b/hapi-fhir-server-mdm/src/main/java/ca/uhn/fhir/mdm/provider/MdmControllerHelper.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.mdm.provider;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.mdm.api.IMdmMatchFinderSvc;
@@ -137,7 +138,8 @@ public class MdmControllerHelper {
* Helper method which will return a bundle of all Matches and Possible Matches.
*/
public IBaseBundle getMatchesAndPossibleMatchesForResource(IAnyResource theResource, String theResourceType, RequestDetails theRequestDetails) {
- RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, theResourceType, null);
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, null, null);
+ RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
List matches = myMdmMatchFinderSvc.getMatchedTargets(theResourceType, theResource, requestPartitionId);
matches.sort(Comparator.comparing((MatchedTarget m) -> m.getMatchResult().getNormalizedScore()).reversed());
diff --git a/hapi-fhir-server-openapi/pom.xml b/hapi-fhir-server-openapi/pom.xml
index 36a2bc32d24..d31364d9923 100644
--- a/hapi-fhir-server-openapi/pom.xml
+++ b/hapi-fhir-server-openapi/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/pom.xml b/hapi-fhir-server/pom.xml
index e1622f46811..89a0bf2f33a 100644
--- a/hapi-fhir-server/pom.xml
+++ b/hapi-fhir-server/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java
index db05162dc6b..239a3285d37 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/CompositeInterceptorBroadcaster.java
@@ -52,6 +52,7 @@ public class CompositeInterceptorBroadcaster {
return newCompositeBroadcaster(theInterceptorBroadcaster, theRequestDetails).callHooksAndReturnObject(thePointcut, theParams);
}
+ // TODO: JA - Refactor to make thePointcut the last argument in order to be consistent with the other methods here
public static boolean hasHooks(Pointcut thePointcut, IInterceptorBroadcaster theInterceptorBroadcaster, RequestDetails theRequestDetails) {
return newCompositeBroadcaster(theInterceptorBroadcaster, theRequestDetails).hasHooks(thePointcut);
}
diff --git a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java
index d11bbe88130..8482b3e0380 100644
--- a/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java
+++ b/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/util/ICachedSearchDetails.java
@@ -22,6 +22,10 @@ package ca.uhn.fhir.rest.server.util;
public interface ICachedSearchDetails {
+ String getUuid();
+
+ void setUuid(String theUuid);
+
/**
* Mark the search as being non-reusable by future queries (as in, the search
* can serve multiple page requests for pages of the same search, but it
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
index 53b68b9f641..9787468a29d 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-api/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
index c296d73aa17..750b50229fa 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-caffeine/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../pom.xml
@@ -20,7 +20,7 @@
ca.uhn.hapi.fhir
hapi-fhir-caching-api
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
com.github.ben-manes.caffeine
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
index d28ef13b6d6..c561f4614c7 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-guava/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir-serviceloaders
ca.uhn.hapi.fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
index 1fde459eddd..ed7be6107a6 100644
--- a/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
+++ b/hapi-fhir-serviceloaders/hapi-fhir-caching-testing/pom.xml
@@ -7,7 +7,7 @@
hapi-fhir
ca.uhn.hapi.fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../../pom.xml
diff --git a/hapi-fhir-serviceloaders/pom.xml b/hapi-fhir-serviceloaders/pom.xml
index d4ffcdf51bb..36642d9c173 100644
--- a/hapi-fhir-serviceloaders/pom.xml
+++ b/hapi-fhir-serviceloaders/pom.xml
@@ -5,7 +5,7 @@
hapi-deployable-pom
ca.uhn.hapi.fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
index 76585461d6f..16dca9e97e4 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-autoconfigure/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
index 7d5f78acc23..8217d41d0b1 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-apache/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
hapi-fhir-spring-boot-sample-client-apache
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
index 0c372797448..ccd488446cf 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-client-okhttp/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
hapi-fhir-spring-boot-sample-client-okhttp
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
index d9e847f3d13..2787d3eac70 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/hapi-fhir-spring-boot-sample-server-jersey/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot-samples
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
hapi-fhir-spring-boot-sample-server-jersey
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
index df6ea42bfc4..1ebbbffc63d 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-samples/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir-spring-boot
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
hapi-fhir-spring-boot-samples
diff --git a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
index e8ebd8e529e..5c845310bfb 100644
--- a/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
+++ b/hapi-fhir-spring-boot/hapi-fhir-spring-boot-starter/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-spring-boot/pom.xml b/hapi-fhir-spring-boot/pom.xml
index 85b7fb43578..01821fdf59d 100644
--- a/hapi-fhir-spring-boot/pom.xml
+++ b/hapi-fhir-spring-boot/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-fhir
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../pom.xml
diff --git a/hapi-fhir-sql-migrate/pom.xml b/hapi-fhir-sql-migrate/pom.xml
index 4c7b6206aec..00c24155a28 100644
--- a/hapi-fhir-sql-migrate/pom.xml
+++ b/hapi-fhir-sql-migrate/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
diff --git a/hapi-fhir-storage-batch2-jobs/pom.xml b/hapi-fhir-storage-batch2-jobs/pom.xml
index 07fba5d71e1..d1b639ea92a 100644
--- a/hapi-fhir-storage-batch2-jobs/pom.xml
+++ b/hapi-fhir-storage-batch2-jobs/pom.xml
@@ -5,7 +5,7 @@
ca.uhn.hapi.fhir
hapi-deployable-pom
- 6.5.2-SNAPSHOT
+ 6.5.3-SNAPSHOT
../hapi-deployable-pom/pom.xml
4.0.0
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java
index e2cec69a4b5..4da4bf751f7 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/expunge/DeleteExpungeJobSubmitterImpl.java
@@ -34,17 +34,17 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.IDeleteExpungeJobSubmitter;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
-
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
+
import java.util.List;
import static ca.uhn.fhir.batch2.jobs.expunge.DeleteExpungeAppCtx.JOB_DELETE_EXPUNGE;
@@ -91,9 +91,9 @@ public class DeleteExpungeJobSubmitterImpl implements IDeleteExpungeJobSubmitter
.forEach(deleteExpungeJobParameters::addPartitionedUrl);
deleteExpungeJobParameters.setBatchSize(theBatchSize);
- ReadPartitionIdRequestDetails details = new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_DELETE_EXPUNGE);
// Also set toplevel partition in case there are no urls
- RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details);
+ RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
deleteExpungeJobParameters.setRequestPartitionId(requestPartition);
JobInstanceStartRequest startRequest = new JobInstanceStartRequest();
diff --git a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java
index 930dbeba4e2..dbafc5733d4 100644
--- a/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java
+++ b/hapi-fhir-storage-batch2-jobs/src/main/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProvider.java
@@ -30,7 +30,6 @@ import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
-import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.ParametersUtil;
@@ -72,8 +71,8 @@ public class ReindexProvider {
.forEach(params::addPartitionedUrl);
}
- ReadPartitionIdRequestDetails details= new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
- RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details);
+ ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX);
+ RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
params.setRequestPartitionId(requestPartition);
JobInstanceStartRequest request = new JobInstanceStartRequest();
diff --git a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java
index c69ed89db66..9c97348815a 100644
--- a/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java
+++ b/hapi-fhir-storage-batch2-jobs/src/test/java/ca/uhn/fhir/batch2/jobs/reindex/ReindexProviderTest.java
@@ -69,7 +69,7 @@ public class ReindexProviderTest {
when(myJobCoordinator.startInstance(any()))
.thenReturn(createJobStartResponse());
- when(myRequestPartitionHelperSvc.determineReadPartitionForRequest(any(), any(), any())).thenReturn(RequestPartitionId.allPartitions());
+ when(myRequestPartitionHelperSvc.determineReadPartitionForRequest(any(), any())).thenReturn(RequestPartitionId.allPartitions());
}
private Batch2JobStartResponse createJobStartResponse() {
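Other providers that still call the three-argument determineReadPartitionForRequest overload migrate the same way as the reindex and delete-expunge call sites above: build a ReadPartitionIdRequestDetails through one of its factory methods and pass it as the second argument. A minimal sketch of such a call site (the field names and operation name below are illustrative, not part of this patch):

// Hypothetical call site, not part of this patch: the ReadPartitionIdRequestDetails
// is now built via a factory method and passed directly as the second argument.
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, "$my-custom-operation");
RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
myJobParameters.setRequestPartitionId(requestPartition);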
diff --git a/hapi-fhir-storage-batch2/pom.xml b/hapi-fhir-storage-batch2/pom.xml
index 0db68563665..5ef04eac996 100644
--- a/hapi-fhir-storage-batch2/pom.xml
+++ b/hapi-fhir-storage-batch2/pom.xml
@@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-storage-cr/pom.xml b/hapi-fhir-storage-cr/pom.xml
index 30a893b23a3..600778ae0d1 100644
--- a/hapi-fhir-storage-cr/pom.xml
+++ b/hapi-fhir-storage-cr/pom.xml
@@ -7,7 +7,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
diff --git a/hapi-fhir-storage-cr/src/test/resources/logback-test.xml b/hapi-fhir-storage-cr/src/test/resources/logback-test.xml
new file mode 100644
index 00000000000..e40a4e73953
--- /dev/null
+++ b/hapi-fhir-storage-cr/src/test/resources/logback-test.xml
@@ -0,0 +1,15 @@
+ <configuration>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>INFO</level>
+ </filter>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT"/>
+ </root>
+
+ </configuration>
diff --git a/hapi-fhir-storage-mdm/pom.xml b/hapi-fhir-storage-mdm/pom.xml
index 7163977bad8..0bc37154487 100644
--- a/hapi-fhir-storage-mdm/pom.xml
+++ b/hapi-fhir-storage-mdm/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/hapi-fhir-storage-test-utilities/pom.xml b/hapi-fhir-storage-test-utilities/pom.xml
index ea4f218dd6f..7d538cc8b00 100644
--- a/hapi-fhir-storage-test-utilities/pom.xml
+++ b/hapi-fhir-storage-test-utilities/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
<modelVersion>4.0.0</modelVersion>
diff --git a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java
index abb20c6fab5..a7ce2524178 100644
--- a/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java
+++ b/hapi-fhir-storage-test-utilities/src/test/java/ca/uhn/fhir/storage/test/BaseTransactionProcessorTest.java
@@ -27,6 +27,7 @@ import org.hl7.fhir.r4.model.IdType;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class BaseTransactionProcessorTest {
@@ -102,4 +103,11 @@ public class BaseTransactionProcessorTest {
assertEquals(input, outcome);
}
+ @Test
+ void testUnqualifiedMatchUrlStart_RegexPatternMatches() {
+ String matchUrl = "patient-first-identifier=MRN%7C123456789";
+ boolean matchResult = BaseTransactionProcessor.UNQUALIFIED_MATCH_URL_START.matcher(matchUrl).find();
+ assertTrue(matchResult, "Failed to find a Regex match using Url '" + matchUrl + "'");
+ }
+
}
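The test above exercises the widened UNQUALIFIED_MATCH_URL_START pattern from BaseTransactionProcessor (see the regex change further down): the hyphen was added to the character class so that custom search parameter codes such as patient-first-identifier are recognized as unqualified match URLs, while match URLs qualified with a resource type remain unaffected. A small illustration of that behaviour, assuming plain java.util.regex (not part of the patch):

// Illustration only: the anchored pattern accepts hyphenated parameter names
// but still rejects match URLs that carry a resource type prefix.
Pattern p = Pattern.compile("^[a-zA-Z0-9_-]+=");
p.matcher("identifier=MRN%7C123456789").find();               // true
p.matcher("patient-first-identifier=MRN%7C123456789").find(); // true (previously false)
p.matcher("Patient?identifier=MRN%7C123456789").find();       // false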
diff --git a/hapi-fhir-storage/pom.xml b/hapi-fhir-storage/pom.xml
index e3fef4e756d..c059d3505e4 100644
--- a/hapi-fhir-storage/pom.xml
+++ b/hapi-fhir-storage/pom.xml
@@ -5,7 +5,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
- <version>6.5.2-SNAPSHOT</version>
+ <version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
@@ -106,6 +106,11 @@
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-tx</artifactId>
+ </dependency>
+
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
@@ -142,20 +147,20 @@
<artifactId>spring-test</artifactId>
<scope>test</scope>
- <dependency>
- <groupId>com.github.dnault</groupId>
- <artifactId>xml-patch</artifactId>
- </dependency>
- <dependency>
- <groupId>io.dogote</groupId>
- <artifactId>json-patch</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework.data</groupId>
- <artifactId>spring-data-commons</artifactId>
- </dependency>
+ <dependency>
+ <groupId>com.github.dnault</groupId>
+ <artifactId>xml-patch</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>io.dogote</groupId>
+ <artifactId>json-patch</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.data</groupId>
+ <artifactId>spring-data-commons</artifactId>
+ </dependency>
-
+
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java
similarity index 96%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java
rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java
index 8a718e69d61..e55161fc900 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/IDaoRegistry.java
@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.api;
/*-
* #%L
- * HAPI FHIR JPA Model
+ * HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java
index 600835e2ef2..0995e2c5b04 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/dao/IFhirResourceDao.java
@@ -27,8 +27,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
-import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
@@ -188,16 +187,17 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao<T> {
/**
* Read a resource by its internal PID
+ *
* @throws ResourceNotFoundException If the ID is not known to the server
- * @throws ResourceGoneException If the resource has been deleted
+ * @throws ResourceGoneException If the resource has been deleted
*/
T readByPid(IResourcePersistentId thePid);
/**
* Read a resource by its internal PID
- * @throws ResourceNotFoundException If the ID is not known to the server
- * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true
*
+ * @throws ResourceNotFoundException If the ID is not known to the server
+ * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true
*/
default T readByPid(IResourcePersistentId thePid, boolean theDeletedOk) {
throw new UnsupportedOperationException(Msg.code(571));
@@ -206,31 +206,36 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao<T> {
/**
* @param theRequestDetails The request details including permissions and partitioning information
* @throws ResourceNotFoundException If the ID is not known to the server
- * @throws ResourceGoneException If the resource has been deleted
+ * @throws ResourceGoneException If the resource has been deleted
*/
T read(IIdType theId, RequestDetails theRequestDetails);
/**
* Should deleted resources be returned successfully. This should be false for
* a normal FHIR read.
+ *
* @throws ResourceNotFoundException If the ID is not known to the server
- * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true
+ * @throws ResourceGoneException If the resource has been deleted and theDeletedOk is true
*/
T read(IIdType theId, RequestDetails theRequestDetails, boolean theDeletedOk);
- BaseHasResource readEntity(IIdType theId, RequestDetails theRequest);
-
/**
- * @param theCheckForForcedId If true, this method should fail if the requested ID contains a numeric PID which exists, but is
- * obscured by a "forced ID" so should not exist as far as the outside world is concerned.
+ * Read an entity from the database, and return it. Note that here we're talking about whatever the
+ * native database representation is, not the parsed {@link IBaseResource} instance.
+ *
+ * @param theId The resource ID to fetch
+ * @param theRequest The request details object associated with the request
*/
- BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest);
+ IBasePersistedResource readEntity(IIdType theId, RequestDetails theRequest);
/**
* Updates index tables associated with the given resource. Does not create a new
* version or update the resource's update time.
+ *
+ * @param theResource The FHIR resource object corresponding to the entity to reindex
+ * @param theEntity The storage entity to reindex
*/
- void reindex(T theResource, ResourceTable theEntity);
+ void reindex(T theResource, IBasePersistedResource theEntity);
void removeTag(IIdType theId, TagTypeEnum theTagType, String theSystem, String theCode, RequestDetails theRequestDetails);
@@ -317,7 +322,7 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao<T> {
/**
* Delete a list of resource Pids
- *
+ *
* CAUTION: This list does not throw an exception if there are delete conflicts. It should always be followed by
* a call to DeleteConflictUtil.validateDeleteConflictsEmptyOrThrowException(fhirContext, conflicts);
* to actually throw the exception. The reason this method doesn't do that itself is that it is expected to be
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
index c23d2029b2c..e7b0ab1e0e3 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/model/DeleteMethodOutcome.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.api.model;
* #L%
*/
-import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.rest.api.MethodOutcome;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
@@ -32,7 +32,7 @@ import java.util.List;
*/
public class DeleteMethodOutcome extends MethodOutcome {
- private List<ResourceTable> myDeletedEntities;
+ private List<? extends IBasePersistedResource> myDeletedEntities;
@Deprecated
private long myExpungedResourcesCount;
@Deprecated
@@ -45,21 +45,21 @@ public class DeleteMethodOutcome extends MethodOutcome {
super(theBaseOperationOutcome);
}
- public List<ResourceTable> getDeletedEntities() {
+ public List<? extends IBasePersistedResource> getDeletedEntities() {
return myDeletedEntities;
}
/**
- * Use {@link ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED}
+ * Use {@literal ca.uhn.fhir.jpa.batch.writer.SqlExecutorWriter#ENTITY_TOTAL_UPDATED_OR_DELETED}
*/
@Deprecated
- public DeleteMethodOutcome setDeletedEntities(List<ResourceTable> theDeletedEntities) {
+ public DeleteMethodOutcome setDeletedEntities(List<? extends IBasePersistedResource> theDeletedEntities) {
myDeletedEntities = theDeletedEntities;
return this;
}
/**
- * Use {@link ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED}
+ * Use {@literal ca.uhn.fhir.jpa.batch.listener.PidReaderCounterListener#RESOURCE_TOTAL_PROCESSED}
*/
@Deprecated
public long getExpungedResourcesCount() {
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java
index 8c7daddf474..6fa0e69aaa0 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/api/svc/ISearchCoordinatorSvc.java
@@ -36,7 +36,7 @@ public interface ISearchCoordinatorSvc {
void cancelAllActiveSearches();
- List getResources(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails);
+ List getResources(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);
IBundleProvider registerSearch(IFhirResourceDao<?> theCallingDao, SearchParameterMap theParams, String theResourceType, CacheControlDirective theCacheControlDirective, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
index 43c1de97dab..7c99f470d00 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
@@ -137,7 +137,7 @@ public abstract class BaseTransactionProcessor {
public static final String URN_PREFIX = "urn:";
public static final String URN_PREFIX_ESCAPED = UrlUtil.escapeUrlParam(URN_PREFIX);
- public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_]+=");
+ public static final Pattern UNQUALIFIED_MATCH_URL_START = Pattern.compile("^[a-zA-Z0-9_-]+=");
public static final Pattern INVALID_PLACEHOLDER_PATTERN = Pattern.compile("[a-zA-Z]+:.*");
private static final Logger ourLog = LoggerFactory.getLogger(BaseTransactionProcessor.class);
@Autowired
@@ -978,8 +978,8 @@ public abstract class BaseTransactionProcessor {
matchUrl = performIdSubstitutionsInMatchUrl(theIdSubstitutions, matchUrl);
DeleteMethodOutcome deleteOutcome = dao.deleteByUrl(matchUrl, deleteConflicts, theRequest);
setConditionalUrlToBeValidatedLater(conditionalUrlToIdMap, matchUrl, deleteOutcome.getId());
- List<ResourceTable> allDeleted = deleteOutcome.getDeletedEntities();
- for (ResourceTable deleted : allDeleted) {
+ List<? extends IBasePersistedResource> allDeleted = deleteOutcome.getDeletedEntities();
+ for (IBasePersistedResource deleted : allDeleted) {
deletedResources.add(deleted.getIdDt().toUnqualifiedVersionless().getValueAsString());
}
if (allDeleted.isEmpty()) {
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java
index 1ac6a1c6bed..f7101250b4e 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/dao/tx/HapiTransactionService.java
@@ -27,6 +27,7 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
@@ -38,24 +39,24 @@ import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.ICallable;
import ca.uhn.fhir.util.TestUtil;
import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataIntegrityViolationException;
-import org.springframework.orm.jpa.JpaDialect;
-import org.springframework.orm.jpa.JpaTransactionManager;
-import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
+import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
+import java.util.Objects;
import java.util.concurrent.Callable;
/**
@@ -73,7 +74,9 @@ public class HapiTransactionService implements IHapiTransactionService {
protected PlatformTransactionManager myTransactionManager;
@Autowired
protected IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
- private volatile Boolean myCustomIsolationSupported;
+ @Autowired
+ protected PartitionSettings myPartitionSettings;
+ private Propagation myTransactionPropagationWhenChangingPartitions = Propagation.REQUIRED;
@VisibleForTesting
public void setInterceptorBroadcaster(IInterceptorBroadcaster theInterceptorBroadcaster) {
@@ -156,15 +159,7 @@ public class HapiTransactionService implements IHapiTransactionService {
}
public boolean isCustomIsolationSupported() {
- if (myCustomIsolationSupported == null) {
- if (myTransactionManager instanceof JpaTransactionManager) {
- JpaDialect jpaDialect = ((JpaTransactionManager) myTransactionManager).getJpaDialect();
- myCustomIsolationSupported = (jpaDialect instanceof HibernateJpaDialect);
- } else {
- myCustomIsolationSupported = false;
- }
- }
- return myCustomIsolationSupported;
+ return false;
}
@VisibleForTesting
@@ -197,6 +192,30 @@ public class HapiTransactionService implements IHapiTransactionService {
ourRequestPartitionThreadLocal.set(requestPartitionId);
}
+ if (Objects.equals(previousRequestPartitionId, requestPartitionId)) {
+ if (canReuseExistingTransaction(theExecutionBuilder)) {
+ /*
+ * If we're already in an active transaction, and it's for the right partition,
+ * and it's not a read-only transaction, we don't need to open a new transaction
+ * so let's just add a method to the stack trace that makes this obvious.
+ */
+ return executeInExistingTransaction(theCallback);
+ }
+ } else if (myTransactionPropagationWhenChangingPartitions == Propagation.REQUIRES_NEW) {
+ return executeInNewTransactionForPartitionChange(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId);
+ }
+
+ return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId);
+ }
+
+ @Nullable
+ private <T> T executeInNewTransactionForPartitionChange(ExecutionBuilder theExecutionBuilder, TransactionCallback<T> theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) {
+ theExecutionBuilder.myPropagation = myTransactionPropagationWhenChangingPartitions;
+ return doExecuteInTransaction(theExecutionBuilder, theCallback, requestPartitionId, previousRequestPartitionId);
+ }
+
+ @Nullable
+ private <T> T doExecuteInTransaction(ExecutionBuilder theExecutionBuilder, TransactionCallback<T> theCallback, RequestPartitionId requestPartitionId, RequestPartitionId previousRequestPartitionId) {
try {
for (int i = 0; ; i++) {
try {
@@ -270,6 +289,11 @@ public class HapiTransactionService implements IHapiTransactionService {
}
}
+ public void setTransactionPropagationWhenChangingPartitions(Propagation theTransactionPropagationWhenChangingPartitions) {
+ Validate.notNull(theTransactionPropagationWhenChangingPartitions);
+ myTransactionPropagationWhenChangingPartitions = theTransactionPropagationWhenChangingPartitions;
+ }
+
@Nullable
protected <T> T doExecuteCallback(ExecutionBuilder theExecutionBuilder, TransactionCallback<T> theCallback) {
try {
@@ -387,6 +411,22 @@ public class HapiTransactionService implements IHapiTransactionService {
}
}
+ /**
+ * Returns true if we already have an active transaction associated with the current thread, AND
+ * either it's non-read-only or we only need a read-only transaction, AND
+ * the newly requested transaction has a propagation of REQUIRED
+ */
+ private static boolean canReuseExistingTransaction(ExecutionBuilder theExecutionBuilder) {
+ return TransactionSynchronizationManager.isActualTransactionActive()
+ && (!TransactionSynchronizationManager.isCurrentTransactionReadOnly() || theExecutionBuilder.myReadOnly)
+ && (theExecutionBuilder.myPropagation == null || theExecutionBuilder.myPropagation == Propagation.REQUIRED);
+ }
+
+ @Nullable
+ private static <T> T executeInExistingTransaction(TransactionCallback<T> theCallback) {
+ return theCallback.doInTransaction(null);
+ }
+
/**
* Invokes {@link Callable#call()} and rethrows any exceptions thrown by that method.
* If the exception extends {@link BaseServerResponseException} it is rethrown unmodified.
@@ -415,4 +455,18 @@ public class HapiTransactionService implements IHapiTransactionService {
public static RequestPartitionId getRequestPartitionAssociatedWithThread() {
return ourRequestPartitionThreadLocal.get();
}
+
+ /**
+ * Throws an {@link IllegalArgumentException} if a transaction is active
+ */
+ public static void noTransactionAllowed() {
+ Validate.isTrue(!TransactionSynchronizationManager.isActualTransactionActive(), "Transaction must not be active but found an active transaction");
+ }
+
+ /**
+ * Throws an {@link IllegalArgumentException} if no transaction is active
+ */
+ public static void requireTransaction() {
+ Validate.isTrue(TransactionSynchronizationManager.isActualTransactionActive(), "Transaction required here but no active transaction found");
+ }
}
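The two new static guards replace declarative propagation checks with plain runtime assertions over Spring's TransactionSynchronizationManager, and Validate.isTrue reports violations as IllegalArgumentException. A minimal test sketch under the assumption of a JUnit 5 harness that supplies a PlatformTransactionManager (the test class and field below are illustrative, not part of this patch):

import static org.junit.jupiter.api.Assertions.assertThrows;

import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import org.junit.jupiter.api.Test;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

class TransactionGuardSketchTest {

	private PlatformTransactionManager txManager; // assumed to be provided by the test setup

	@Test
	void transactionGuards() {
		// Outside any transaction: requireTransaction() fails, noTransactionAllowed() passes.
		assertThrows(IllegalArgumentException.class, HapiTransactionService::requireTransaction);
		HapiTransactionService.noTransactionAllowed();

		// Inside a programmatic transaction the expectations flip.
		new TransactionTemplate(txManager).executeWithoutResult(status -> {
			HapiTransactionService.requireTransaction();
			assertThrows(IllegalArgumentException.class, HapiTransactionService::noTransactionAllowed);
		});
	}
}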
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java
index 8bbe8b979ec..d9391a4d7e7 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/interceptor/PatientIdPartitionInterceptor.java
@@ -29,6 +29,7 @@ import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.BaseSearchParamExtractor;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
@@ -65,6 +66,9 @@ public class PatientIdPartitionInterceptor {
@Autowired
private ISearchParamExtractor mySearchParamExtractor;
+ @Autowired
+ private PartitionSettings myPartitionSettings;
+
/**
* Constructor
*/
@@ -75,10 +79,11 @@ public class PatientIdPartitionInterceptor {
/**
* Constructor
*/
- public PatientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor) {
+ public PatientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor, PartitionSettings thePartitionSettings) {
this();
myFhirContext = theFhirContext;
mySearchParamExtractor = theSearchParamExtractor;
+ myPartitionSettings = thePartitionSettings;
}
@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
@@ -121,6 +126,9 @@ public class PatientIdPartitionInterceptor {
@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
public RequestPartitionId identifyForRead(ReadPartitionIdRequestDetails theReadDetails, RequestDetails theRequestDetails) {
+ if (isBlank(theReadDetails.getResourceType())) {
+ return provideNonCompartmentMemberTypeResponse(null);
+ }
RuntimeResourceDefinition resourceDef = myFhirContext.getResourceDefinition(theReadDetails.getResourceType());
List<RuntimeSearchParam> compartmentSps = getCompartmentSearchParams(resourceDef);
if (compartmentSps.isEmpty()) {
@@ -259,7 +267,7 @@ public class PatientIdPartitionInterceptor {
@SuppressWarnings("unused")
@Nonnull
protected RequestPartitionId provideNonCompartmentMemberTypeResponse(IBaseResource theResource) {
- return RequestPartitionId.defaultPartition();
+ return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId());
}
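Because the interceptor now resolves the default partition from PartitionSettings, any manual wiring has to supply that third collaborator. A minimal Spring configuration sketch under that assumption (the bean method name is illustrative, not part of this patch):

// Illustrative wiring only: PartitionSettings is now a required collaborator so that
// non-compartment resources are routed to the configured default partition id.
@Bean
public PatientIdPartitionInterceptor patientIdPartitionInterceptor(FhirContext theFhirContext, ISearchParamExtractor theSearchParamExtractor, PartitionSettings thePartitionSettings) {
	return new PatientIdPartitionInterceptor(theFhirContext, theSearchParamExtractor, thePartitionSettings);
}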
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java
similarity index 99%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java
rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java
index 33e3846b826..4a7a58f5016 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchRuntimeDetails.java
@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.search;
/*-
* #%L
- * HAPI FHIR JPA Model
+ * HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java
similarity index 98%
rename from hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java
rename to hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java
index 152e72ed206..5d5be1f3f5d 100644
--- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/model/search/SearchStatusEnum.java
@@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.model.search;
/*-
* #%L
- * HAPI FHIR JPA Model
+ * HAPI FHIR Storage api
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java
index fec84bc0078..34c84c12f65 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/partition/BaseRequestPartitionHelperSvc.java
@@ -29,7 +29,6 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
-import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.SystemRequestDetails;
@@ -47,7 +46,6 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
-import static ca.uhn.fhir.jpa.model.util.JpaConstants.ALL_PARTITIONS_NAME;
import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooks;
import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.doCallHooksAndReturnObject;
import static ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster.hasHooks;
@@ -94,39 +92,44 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition
*/
@Nonnull
@Override
- public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, ReadPartitionIdRequestDetails theDetails) {
+ public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
RequestPartitionId requestPartitionId;
- boolean nonPartitionableResource = !isResourcePartitionable(theResourceType);
+ String resourceType = theDetails != null ? theDetails.getResourceType() : null;
+ boolean nonPartitionableResource = !isResourcePartitionable(resourceType);
if (myPartitionSettings.isPartitioningEnabled()) {
- // Handle system requests
+
+ RequestDetails requestDetails = theRequest;
//TODO GGG eventually, theRequest will not be allowed to be null here, and we will pass through SystemRequestDetails instead.
- if ((theRequest == null || theRequest instanceof SystemRequestDetails) && nonPartitionableResource) {
- return RequestPartitionId.defaultPartition();
+ if (requestDetails == null) {
+ requestDetails = new SystemRequestDetails();
}
- if (theRequest instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) theRequest)) {
- requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) theRequest, nonPartitionableResource);
- } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, theRequest)) {
+ // Handle system requests
+ if (requestDetails instanceof SystemRequestDetails && systemRequestHasExplicitPartition((SystemRequestDetails) requestDetails) && !nonPartitionableResource) {
+ requestPartitionId = getSystemRequestPartitionId((SystemRequestDetails) requestDetails, nonPartitionableResource);
+ } else if ((requestDetails instanceof SystemRequestDetails) && nonPartitionableResource) {
+ return RequestPartitionId.fromPartitionId(myPartitionSettings.getDefaultPartitionId());
+ } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, myInterceptorBroadcaster, requestDetails)) {
// Interceptor call: STORAGE_PARTITION_IDENTIFY_ANY
HookParams params = new HookParams()
- .add(RequestDetails.class, theRequest)
- .addIfMatchesType(ServletRequestDetails.class, theRequest);
- requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params);
- } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, theRequest)) {
+ .add(RequestDetails.class, requestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, requestDetails);
+ requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_ANY, params);
+ } else if (hasHooks(Pointcut.STORAGE_PARTITION_IDENTIFY_READ, myInterceptorBroadcaster, requestDetails)) {
// Interceptor call: STORAGE_PARTITION_IDENTIFY_READ
HookParams params = new HookParams()
- .add(RequestDetails.class, theRequest)
- .addIfMatchesType(ServletRequestDetails.class, theRequest)
+ .add(RequestDetails.class, requestDetails)
+ .addIfMatchesType(ServletRequestDetails.class, requestDetails)
.add(ReadPartitionIdRequestDetails.class, theDetails);
- requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params);
+ requestPartitionId = (RequestPartitionId) doCallHooksAndReturnObject(myInterceptorBroadcaster, requestDetails, Pointcut.STORAGE_PARTITION_IDENTIFY_READ, params);
} else {
requestPartitionId = null;
}
validateRequestPartitionNotNull(requestPartitionId, Pointcut.STORAGE_PARTITION_IDENTIFY_READ);
- return validateNormalizeAndNotifyHooksForRead(requestPartitionId, theRequest, theResourceType);
+ return validateNormalizeAndNotifyHooksForRead(requestPartitionId, requestDetails, resourceType);
}
return RequestPartitionId.allPartitions();
@@ -242,16 +245,6 @@ public abstract class BaseRequestPartitionHelperSvc implements IRequestPartition
return theRequest.getRequestPartitionId() != null || theRequest.getTenantId() != null;
}
- @Nonnull
- @Override
- public PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId) {
- Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull();
- if (partitionId == null) {
- partitionId = myPartitionSettings.getDefaultPartitionId();
- }
- return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate());
- }
-
@Nonnull
@Override
public Set